From adb5be5b71a87de85d0397da2c72673a5730a675 Mon Sep 17 00:00:00 2001 From: Ben Fulton Date: Fri, 10 Jun 2016 12:19:58 -0400 Subject: [PATCH 01/86] Add a password expiration config option Add a "last_password_change" field to the user table and user object. Add a config option "password_expiration_period" to allow admins to require users to change passwords every so many days. On login, check for the presence of the config option and, if the user's password has expired, redirect to the "change password" page. --- config/galaxy.ini.sample | 6 ++++- lib/galaxy/config.py | 1 + lib/galaxy/model/__init__.py | 2 ++ lib/galaxy/model/mapping.py | 1 + .../0132_add_lastpasswordchange_to_user.py | 18 +++++++++++++ lib/galaxy/webapps/galaxy/controllers/user.py | 25 ++++++++++++++++--- templates/user/change_password.mako | 12 +++++++++ 7 files changed, 61 insertions(+), 4 deletions(-) create mode 100755 lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py diff --git a/config/galaxy.ini.sample b/config/galaxy.ini.sample index 008e093a8706..6fb82b66af2e 100644 --- a/config/galaxy.ini.sample +++ b/config/galaxy.ini.sample @@ -418,7 +418,11 @@ paste.app_factory = galaxy.web.buildapp:app_factory # In use only if activation_grace_period is set. #inactivity_box_content = Your account has not been activated yet. Feel free to browse around and see what's available, but you won't be able to upload data or run jobs until you have verified your email address. - +# Password expiration period (in days). Users are required to change their +# password every x days. Users will be redirected to the change password +# screen when they log in after their password expires. Enter 0 to disable +# password expiration. +#password_expiration_period = 0 # Galaxy Session Timeout # This provides a timeout (in minutes) after which a user will have to log back in. diff --git a/lib/galaxy/config.py b/lib/galaxy/config.py index 3924d46ff6c9..8566e0918813 --- a/lib/galaxy/config.py +++ b/lib/galaxy/config.py @@ -113,6 +113,7 @@ def __init__( self, **kwargs ): self.expose_user_name = kwargs.get( "expose_user_name", False ) self.expose_user_email = kwargs.get( "expose_user_email", False ) + self.password_expiration_period = timedelta(days=int(kwargs.get( "password_expiration_period", 0 ))) # Check for tools defined in the above non-shed tool configs (i.e., tool_conf.xml) that have # been migrated from the Galaxy code distribution to the Tool Shed.
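As orientation before the model and controller diffs that follow: the whole feature reduces to one timedelta comparison at login time. A minimal, runnable sketch of that check (the 90-day value and the standalone password_expired helper are illustrative assumptions, not code from the patch):

    from datetime import datetime, timedelta

    # galaxy.ini supplies the period as a day count; timedelta(0) is falsy,
    # so the documented "0 disables password expiration" falls out naturally.
    password_expiration_period = timedelta(days=90)

    def password_expired(last_password_change, period):
        # Mirrors the controller guard added later in this patch: the password
        # is expired when the last change predates (today - period).
        if not period:
            return False
        return last_password_change < datetime.today() - period

    print(password_expired(datetime(2016, 1, 1), password_expiration_period))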
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 4e767a2ab280..3aa7985587d4 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -166,6 +166,7 @@ def __init__( self, email=None, password=None ): self.active = False self.activation_token = None self.username = None + self.last_password_change = None # Relationships self.histories = [] self.credentials = [] @@ -179,6 +180,7 @@ def set_password_cleartext( self, cleartext ): self.password = galaxy.security.passwords.hash_password( cleartext ) else: self.password = new_secure_hash( text_type=cleartext ) + self.last_password_change = datetime.now() def check_password( self, cleartext ): """ diff --git a/lib/galaxy/model/mapping.py b/lib/galaxy/model/mapping.py index 7ba4d42068bb..0c49a6a2161e 100644 --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -33,6 +33,7 @@ Column( "email", TrimmedString( 255 ), index=True, nullable=False ), Column( "username", TrimmedString( 255 ), index=True, unique=True ), Column( "password", TrimmedString( 255 ), nullable=False ), + Column( "last_password_change", DateTime, default=now ), Column( "external", Boolean, default=False ), Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ), Column( "deleted", Boolean, index=True, default=False ), diff --git a/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py b/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py new file mode 100755 index 000000000000..87ccd736f528 --- /dev/null +++ b/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py @@ -0,0 +1,18 @@ +""" +Migration script to add a last_password_change field to +""" + +from sqlalchemy import Table, MetaData, DateTime, Column + + +def upgrade(migrate_engine): + meta = MetaData(bind=migrate_engine) + account = Table('galaxy_user', meta, autoload=True) + lpc = Column('last_password_change', DateTime()) + lpc.create(account) + + +def downgrade(migrate_engine): + meta = MetaData(bind=migrate_engine) + account = Table('galaxy_user', meta, autoload=True) + account.c.last_password_change.drop() diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py index b1fa9f679063..1908528cd384 100644 --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -563,6 +563,19 @@ def __validate_login( self, trans, **kwd ): message, status = self.resend_verification_email( trans, user.email, user.username ) else: # activation is OFF message, success, status = self.proceed_login( trans, user, redirect ) + pw_expires = trans.app.config.password_expiration_period + if pw_expires: + if user.last_password_change < datetime.today() - pw_expires: + trans.response.send_redirect(web.url_for(controller='user', + action='change_password', + message='Your time is up! Change your password to access galaxy', + redirect_home=True, + status='error')) + elif user.last_password_change < datetime.today() - timedelta(days=pw_expires.days/10): + expiredate = datetime.today() - user.last_password_change + pw_expires + message = 'You are now logged in as %s. Your password will expire in %s days.
<br>You can <a href="%s">go back to the page you were visiting</a> or <a href="%s">go to the home page</a>.' % \ + (user.email, expiredate.days, redirect, url_for('/')) + status = 'error' return ( message, status, user, success ) def proceed_login( self, trans, user, redirect ): @@ -1118,7 +1131,7 @@ def change_password( self, trans, token=None, **kwd): provided, don't require current password. """ status = None - message = None + message = kwd.get( 'message', '' ) user = None if kwd.get( 'change_password_button', False ): password = kwd.get( 'password', '' ) @@ -1162,11 +1175,17 @@ def change_password( self, trans, token=None, **kwd): trans.sa_session.add( user ) trans.sa_session.flush() trans.log_event( "User change password" ) - return trans.show_ok_message('The password has been changed and any other existing Galaxy sessions have been logged out (but jobs in histories in those sessions will not be interrupted).') + if kwd.get('display_top', False) == 'True': + return trans.response.send_redirect( url_for( '/', message='Password has been changed' )) + else: + return trans.show_ok_message('The password has been changed and any other existing Galaxy sessions have been logged out (but jobs in histories in those sessions will not be interrupted).') + return trans.fill_template( '/user/change_password.mako', token=token, status=status, - message=message ) + message=message, + display_top=kwd.get('redirect_home', False) ) @web.expose def reset_password( self, trans, email=None, **kwd ): diff --git a/templates/user/change_password.mako b/templates/user/change_password.mako index 14030c3d8b94..be5173c54abf 100644 --- a/templates/user/change_password.mako +++ b/templates/user/change_password.mako @@ -1,4 +1,15 @@ <%inherit file="/base.mako"/> + +%if display_top: + +%endif + + <%namespace file="/message.mako" import="render_msg" /> %if message: @@ -7,6 +18,7 @@
+
Change Password
%if token: From 188df8c84265ddcf55712d4cf0e8a485cad7665b Mon Sep 17 00:00:00 2001 From: Ben Fulton Date: Fri, 10 Jun 2016 12:45:21 -0400 Subject: [PATCH 02/86] Fix missing whitespace around arithmetic operator warning --- lib/galaxy/webapps/galaxy/controllers/user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) mode change 100644 => 100755 lib/galaxy/webapps/galaxy/controllers/user.py diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py old mode 100644 new mode 100755 index 1908528cd384..04251859302b --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -571,7 +571,7 @@ def __validate_login( self, trans, **kwd ): message='Your time is up! Change your password to access galaxy', redirect_home=True, status='error')) - elif user.last_password_change < datetime.today() - timedelta(days=pw_expires.days/10): + elif user.last_password_change < datetime.today() - timedelta(days=pw_expires.days / 10): expiredate = datetime.today() - user.last_password_change + pw_expires message = 'You are now logged in as %s. Your password will expire in %s days.
<br>You can <a href="%s">go back to the page you were visiting</a> or <a href="%s">go to the home page</a>.' % \ (user.email, expiredate.days, redirect, url_for('/')) From d31ce0d95949d3ed2f956398d5883d28fa7f65fd Mon Sep 17 00:00:00 2001 From: Ben Fulton Date: Fri, 10 Jun 2016 16:06:00 -0400 Subject: [PATCH 03/86] Improve wording of password expired message. Stylistic changes --- lib/galaxy/config.py | 2 +- .../migrate/versions/0132_add_lastpasswordchange_to_user.py | 2 +- lib/galaxy/webapps/galaxy/controllers/user.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) mode change 100644 => 100755 lib/galaxy/config.py diff --git a/lib/galaxy/config.py b/lib/galaxy/config.py old mode 100644 new mode 100755 index 8566e0918813..34d11ef99306 --- a/lib/galaxy/config.py +++ b/lib/galaxy/config.py @@ -113,7 +113,7 @@ def __init__( self, **kwargs ): self.expose_user_name = kwargs.get( "expose_user_name", False ) self.expose_user_email = kwargs.get( "expose_user_email", False ) - self.password_expiration_period = timedelta(days=int(kwargs.get( "password_expiration_period", 0 ))) + self.password_expiration_period = timedelta( days=int( kwargs.get( "password_expiration_period", 0 ) ) ) # Check for tools defined in the above non-shed tool configs (i.e., tool_conf.xml) that have # been migrated from the Galaxy code distribution to the Tool Shed. diff --git a/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py b/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py index 87ccd736f528..bb90cc71847b 100755 --- a/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py +++ b/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py @@ -1,5 +1,5 @@ """ -Migration script to add a last_password_change field to +Migration script to add a last_password_change field to the user table """ from sqlalchemy import Table, MetaData, DateTime, Column diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py index 04251859302b..95178075270f 100755 --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -568,7 +568,7 @@ def __validate_login( self, trans, **kwd ): if user.last_password_change < datetime.today() - pw_expires: trans.response.send_redirect(web.url_for(controller='user', action='change_password', - message='Your time is up! Change your password to access galaxy', + message='Your password has expired. Please change it to access Galaxy.', redirect_home=True, status='error')) elif user.last_password_change < datetime.today() - timedelta(days=pw_expires.days / 10): From 201a514cfde26dd0f3c2089f9544f2d2693cf2fb Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 15 Jun 2016 11:27:38 -0400 Subject: [PATCH 04/86] Fix file permissions. --- lib/galaxy/config.py | 0 lib/galaxy/webapps/galaxy/controllers/user.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 lib/galaxy/config.py mode change 100755 => 100644 lib/galaxy/webapps/galaxy/controllers/user.py From 7a480e8c9ee180f6699535a4c321d959ba684699 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 15 Jun 2016 11:53:17 -0400 Subject: [PATCH 05/86] Don't actually log the person in in the event that their password has expired.
Change 'about to expire' warning to a warning and not an error. --- lib/galaxy/webapps/galaxy/controllers/user.py | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py index 95178075270f..ce9d25079af2 100644 --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -562,20 +562,21 @@ def __validate_login( self, trans, **kwd ): else: # Grace period is off. Login is disabled and user will have the activation email resent. message, status = self.resend_verification_email( trans, user.email, user.username ) else: # activation is OFF - message, success, status = self.proceed_login( trans, user, redirect ) pw_expires = trans.app.config.password_expiration_period - if pw_expires: - if user.last_password_change < datetime.today() - pw_expires: - trans.response.send_redirect(web.url_for(controller='user', - action='change_password', - message='Your password has expired. Please change it to access Galaxy.', - redirect_home=True, - status='error')) - elif user.last_password_change < datetime.today() - timedelta(days=pw_expires.days / 10): - expiredate = datetime.today() - user.last_password_change + pw_expires - message = 'You are now logged in as %s. Your password will expire in %s days.
<br>You can <a href="%s">go back to the page you were visiting</a> or <a href="%s">go to the home page</a>.' % \ - (user.email, expiredate.days, redirect, url_for('/')) - status = 'error' + if pw_expires and user.last_password_change < datetime.today() - pw_expires: + # Password is expired, we don't log them in. + trans.response.send_redirect(web.url_for(controller='user', + action='change_password', + message='Your password has expired. Please change it to access Galaxy.', + redirect_home=True, + status='error')) + message, success, status = self.proceed_login( trans, user, redirect ) + if pw_expires and user.last_password_change < datetime.today() - timedelta(days=pw_expires.days / 10): + # If password is about to expire, modify message to state that. + expiredate = datetime.today() - user.last_password_change + pw_expires + message = 'You are now logged in as %s. Your password will expire in %s days.
<br>You can <a href="%s">go back to the page you were visiting</a> or <a href="%s">go to the home page</a>.' % \ + (user.email, expiredate.days, redirect, url_for('/')) + status = 'warning' return ( message, status, user, success ) def proceed_login( self, trans, user, redirect ): From 56b5002aababf39a11d8e6fd794a19382f5f767f Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 22 Jun 2016 10:29:57 -0400 Subject: [PATCH 06/86] Tabular dataset chunking fix which should prevent duplicated lines. Skipping to the newline previously shifted the chunk past where it should read. --- lib/galaxy/datatypes/tabular.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py index e2f021fa2737..cac3e7b47ded 100644 --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -56,13 +56,16 @@ def displayable( self, dataset ): def get_chunk(self, trans, dataset, chunk): ck_index = int(chunk) f = open(dataset.file_name) + t_start = (ck_index * trans.app.config.display_chunk_size) f.seek(ck_index * trans.app.config.display_chunk_size) # If we aren't at the start of the file, seek to next newline. Do this better eventually. if f.tell() != 0: cursor = f.read(1) while cursor and cursor != '\n': cursor = f.read(1) - ck_data = f.read(trans.app.config.display_chunk_size) + t_firstnewline = f.tell() + t_skipped = t_firstnewline - t_start + ck_data = f.read(trans.app.config.display_chunk_size - (t_skipped)) cursor = f.read(1) while cursor and ck_data[-1] != '\n': ck_data += cursor From 0ab502baa1ae0e16c9345c3ac816a1dde5cb54d9 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 22 Jun 2016 10:37:42 -0400 Subject: [PATCH 07/86] Rename vars to be sensible, add comment to clarify what this is doing. --- lib/galaxy/datatypes/tabular.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py index cac3e7b47ded..0a2c03334e70 100644 --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -56,16 +56,18 @@ def displayable( self, dataset ): def get_chunk(self, trans, dataset, chunk): ck_index = int(chunk) f = open(dataset.file_name) - t_start = (ck_index * trans.app.config.display_chunk_size) - f.seek(ck_index * trans.app.config.display_chunk_size) + initial_offset = (ck_index * trans.app.config.display_chunk_size) + f.seek(initial_offset) # If we aren't at the start of the file, seek to next newline. Do this better eventually. if f.tell() != 0: cursor = f.read(1) while cursor and cursor != '\n': cursor = f.read(1) - t_firstnewline = f.tell() - t_skipped = t_firstnewline - t_start - ck_data = f.read(trans.app.config.display_chunk_size - (t_skipped)) + read_start_offset = f.tell() + prechunk_skip = read_start_offset - initial_offset + # We subtract the prechunk skip out of the primary chunk read to avoid + # shifting the chunk tail onto a line it shouldn't have. + ck_data = f.read(trans.app.config.display_chunk_size - prechunk_skip) cursor = f.read(1) while cursor and ck_data[-1] != '\n': ck_data += cursor From aeb05e7645a37ffd5cd60b382a429959fca4a20b Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 23 Jun 2016 20:20:11 -0400 Subject: [PATCH 08/86] Add Makefile target for fetching updated bower dependencies. This mirrors the existing `client` target and serves as top-level documentation for how to perform this action.
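For quick reference, a usage sketch (not from the patch itself: only the target names defined in the diff below are real, and the override relies on standard GNU Make semantics for ?=-declared variables):

    # fetch or refresh the bower-managed client libraries
    make client-install-libs

    # point the grunt-based targets at a globally installed grunt binary
    make client GRUNT_EXEC=grunt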
--- Makefile | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index e1c0b3d3e25e..cea7c06f3674 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,7 @@ VENV?=.venv IN_VENV=if [ -f $(VENV)/bin/activate ]; then . $(VENV)/bin/activate; fi; PROJECT_URL?=https://github.com/galaxyproject/galaxy GRUNT_DOCKER_NAME:=galaxy/client-builder:16.01 +GRUNT_EXEC?=node_modules/grunt-cli/bin/grunt all: help @echo "This makefile is primarily used for building Galaxy's JS client. A sensible all target is not yet implemented." @@ -59,10 +60,13 @@ npm-deps: ## Install NodeJS dependencies. cd client && npm install grunt: npm-deps ## Calls out to Grunt to build client - cd client && node_modules/grunt-cli/bin/grunt + cd client && $(GRUNT_EXEC) style: npm-deps ## Calls the style task of Grunt - cd client && node_modules/grunt-cli/bin/grunt style + cd client && $(GRUNT_EXEC) style + +client-install-libs: npm-deps ## Fetch updated client dependencies using bower. + cd client && $(GRUNT_EXEC) install-libs client: grunt style ## Rebuild all client-side artifacts From ca82e6d746a685209b9585b3f69128a55198f801 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Sat, 25 Jun 2016 15:05:04 +0100 Subject: [PATCH 09/86] Deletes existing jobs with same ID when re-submitting. --- lib/galaxy/jobs/runners/kubernetes.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py index 10c70244cc46..338e909fe146 100644 --- a/lib/galaxy/jobs/runners/kubernetes.py +++ b/lib/galaxy/jobs/runners/kubernetes.py @@ -89,7 +89,14 @@ def queue_job(self, job_wrapper): "spec": self.__get_k8s_job_spec(job_wrapper) } + # Checks if job exists + job = Job(self._pykube_api, k8s_job_obj) + if job.exists(): + job.delete() # Creates the Kubernetes Job + # TODO if a job with that ID exists, what should we do? + # TODO do we trust that this is the same job and use that? + # TODO or create a new job as we cannot make sure Job(self._pykube_api, k8s_job_obj).create() # define job attributes in the AsyncronousJobState for follow-up From 01fd25b9f4b79b4f2868efc51ad76e19fc132b5d Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Sat, 25 Jun 2016 15:16:36 +0100 Subject: [PATCH 10/86] Changes use of produce_unique_k8s_job_name to be able to use both job and job_wrappers. --- lib/galaxy/jobs/runners/kubernetes.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py index 338e909fe146..230ba639e018 100644 --- a/lib/galaxy/jobs/runners/kubernetes.py +++ b/lib/galaxy/jobs/runners/kubernetes.py @@ -73,7 +73,7 @@ def queue_job(self, job_wrapper): job_destination = job_wrapper.job_destination # Construction of the Kubernetes Job object follows: http://kubernetes.io/docs/user-guide/persistent-volumes/ - k8s_job_name = self.__produce_unique_k8s_job_name(job_wrapper) + k8s_job_name = self.__produce_unique_k8s_job_name(job_wrapper.get_id_tag()) k8s_job_obj = { "apiVersion": "extensions/v1beta1", "kind": "Job", @@ -108,9 +108,9 @@ def queue_job(self, job_wrapper): external_runjob_script = None return external_runjob_script - def __produce_unique_k8s_job_name(self, job_wrapper): + def __produce_unique_k8s_job_name(self, galaxy_internal_job_id): # wrapper.get_id_tag() instead of job_id for compatibility with TaskWrappers. 
- return "galaxy-" + job_wrapper.get_id_tag() + return "galaxy-" + galaxy_internal_job_id def __get_k8s_job_spec(self, job_wrapper): """Creates the k8s Job spec. For a Job spec, the only requirement is to have a .spec.template.""" @@ -123,7 +123,7 @@ def __get_k8s_job_spec_template(self, job_wrapper): (see pod selector) and an appropriate restart policy.""" k8s_spec_template = { "metadata": { - "labels": {"app": self.__produce_unique_k8s_job_name(job_wrapper)} + "labels": {"app": self.__produce_unique_k8s_job_name(job_wrapper.get_id_tag())} }, "spec": { "volumes": self.__get_k8s_mountable_volumes(job_wrapper), From 409eb373cd9233f7925c64e9c5716859a2363ba1 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Sat, 25 Jun 2016 15:17:58 +0100 Subject: [PATCH 11/86] Fixes stop job method so that it correctly scales down the Kubernetes job. --- lib/galaxy/jobs/runners/kubernetes.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py index 230ba639e018..bc69aa86edb8 100644 --- a/lib/galaxy/jobs/runners/kubernetes.py +++ b/lib/galaxy/jobs/runners/kubernetes.py @@ -284,8 +284,9 @@ def __produce_log_file(self, job_state): def stop_job(self, job): """Attempts to delete a dispatched job to the k8s cluster""" try: - jobs = Job.objects(self._pykube_api).filter(selector="app=" + job.job_runner_external_id) - if jobs.response['items'].len() >= 0: + jobs = Job.objects(self._pykube_api).filter(selector="app=" + + self.__produce_unique_k8s_job_name(job.get_id_tag())) + if len(jobs.response['items']) >= 0: job_to_delete = Job(self._pykube_api, jobs.response['items'][0]) job_to_delete.scale(replicas=0) # TODO assert whether job parallelism == 0 From 861740bd6080bf0ab280594d24460143cbee1033 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Sat, 25 Jun 2016 10:54:51 -0400 Subject: [PATCH 12/86] Fix dataset matcher when no tool available. Broken with 80ac816af94d43d7214671582db127483baea2d3. --- lib/galaxy/tools/parameters/dataset_matcher.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/tools/parameters/dataset_matcher.py b/lib/galaxy/tools/parameters/dataset_matcher.py index c919864eff00..a04dc67dfab5 100644 --- a/lib/galaxy/tools/parameters/dataset_matcher.py +++ b/lib/galaxy/tools/parameters/dataset_matcher.py @@ -35,7 +35,12 @@ def hda_accessible( self, hda, check_security=True ): accessible to user. 
""" dataset = hda.dataset - state_valid = dataset.state in self.tool.valid_input_states + has_tool = self.tool + if has_tool: + valid_input_states = self.tool.valid_input_states + else: + valid_input_states = galaxy.model.Dataset.valid_input_states + state_valid = dataset.state in valid_input_states return state_valid and ( not check_security or self.__can_access_dataset( dataset ) ) def valid_hda_match( self, hda, check_implicit_conversions=True, check_security=False ): From a77e3ff6607a86a85a30bfa552402e57602ab0e2 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 13:07:27 -0400 Subject: [PATCH 13/86] Add tox.ini config params for python3.5 --- tox.ini | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 2077c60852d6..65af9bda1f39 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27-lint, py27-unit, qunit, mako-count, web-controller-line-count, py34-lint +envlist = py27-lint, py27-unit, qunit, mako-count, web-controller-line-count, py34-lint, py35-lint skipsdist = True @@ -13,6 +13,11 @@ commands = bash .ci/flake8_py3_wrapper.sh whitelist_externals = bash deps = flake8 +[testenv:py35-lint] +commands = bash .ci/flake8_py3_wrapper.sh +whitelist_externals = bash +deps = flake8 + [testenv:py27-unit] commands = bash run_tests.sh --no-create-venv -u From c9cbcf6f79b8d6ae3fa6fa6178c0d59440b8545b Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 14:02:22 -0400 Subject: [PATCH 14/86] Python3: fix passwords.py --- lib/galaxy/security/passwords.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/security/passwords.py b/lib/galaxy/security/passwords.py index bf6a34aca522..299f6d6bd966 100644 --- a/lib/galaxy/security/passwords.py +++ b/lib/galaxy/security/passwords.py @@ -71,11 +71,11 @@ def pbkdf2_bin( data, salt, iterations=1000, keylen=24, hashfunc=None ): def _pseudorandom(x, mac=mac): h = mac.copy() h.update(x) - return map(ord, h.digest()) + return [ord(_) for _ in h.digest()] buf = [] - for block in xrange(1, -(-keylen // mac.digest_size) + 1): + for block in range(1, -(-keylen // mac.digest_size) + 1): rv = u = _pseudorandom(salt + _pack_int(block)) - for i in xrange(iterations - 1): + for i in range(iterations - 1): u = _pseudorandom(''.join(map(chr, u))) rv = starmap( xor, zip( rv, u ) ) # Python < 2.6.8: starmap requires function inputs to be tuples, so we need to use zip instead of izip buf.extend(rv) From bd92dc17fc8314806db39354c1c6d07286a97550 Mon Sep 17 00:00:00 2001 From: Xiaoqian Jiang Date: Sat, 25 Jun 2016 16:14:15 -0400 Subject: [PATCH 15/86] Python3: test/unit/managers/base.py --- .ci/py3_sources.txt | 1 + test/unit/managers/base.py | 18 ++++++++++-------- tox.ini | 14 +++++++++----- 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 325fa99e1606..9fbf4113f082 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -52,6 +52,7 @@ test/casperjs/ test/functional/ test/integration/ test/manual/ +test/unit/managers/base.py test/unit/tools/test_actions.py test/unit/workflows/test_run_parameters.py tool_list.py diff --git a/test/unit/managers/base.py b/test/unit/managers/base.py index 1df3cda3a9b5..fe40a6f3acca 100644 --- a/test/unit/managers/base.py +++ b/test/unit/managers/base.py @@ -4,8 +4,10 @@ import os import imp - import json +import collections + +from six import string_types test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), 
'../unittest_utils/utility.py' ) ) @@ -60,7 +62,7 @@ def log( self, *args, **kwargs ): print( *args, **kwargs ) # ---- additional test types - TYPES_NEEDING_NO_SERIALIZERS = ( basestring, bool, type( None ), int, float ) + TYPES_NEEDING_NO_SERIALIZERS = ( string_types, bool, type( None ), int, float ) def assertKeys( self, obj, key_list ): self.assertEqual( sorted( obj.keys() ), sorted( key_list ) ) @@ -73,13 +75,13 @@ def assertHasKeys( self, obj, key_list ): self.assertTrue( True, 'keys found in object' ) def assertNullableBasestring( self, item ): - if not isinstance( item, ( basestring, type( None ) ) ): + if not isinstance( item, ( string_types, type( None ) ) ): self.fail( 'Non-nullable basestring: ' + str( type( item ) ) ) # TODO: len mod 8 and hex re self.assertTrue( True, 'is nullable basestring: ' + str( item ) ) def assertEncodedId( self, item ): - if not isinstance( item, basestring ): + if not isinstance( item, string_types ): self.fail( 'Non-string: ' + str( type( item ) ) ) # TODO: len mod 8 and hex re self.assertTrue( True, 'is id: ' + item ) @@ -91,14 +93,14 @@ def assertNullableEncodedId( self, item ): self.assertEncodedId( item ) def assertDate( self, item ): - if not isinstance( item, basestring ): + if not isinstance( item, string_types ): self.fail( 'Non-string: ' + str( type( item ) ) ) # TODO: no great way to parse this fully (w/o python-dateutil) # TODO: re? self.assertTrue( True, 'is date: ' + item ) def assertUUID( self, item ): - if not isinstance( item, basestring ): + if not isinstance( item, string_types ): self.fail( 'Non-string: ' + str( type( item ) ) ) # TODO: re for d4d76d69-80d4-4ed7-80c7-211ebcc1a358 self.assertTrue( True, 'is uuid: ' + item ) @@ -109,13 +111,13 @@ def assertORMFilter( self, item, msg=None ): self.assertTrue( True, msg or ( 'is an orm filter: ' + str( item ) ) ) def assertFnFilter( self, item, msg=None ): - if not item or not callable( item ): + if not item or not isinstance( item, collections.Callable): self.fail( 'Not a fn filter: ' + str( type( item ) ) ) self.assertTrue( True, msg or ( 'is a fn filter: ' + str( item ) ) ) def assertIsJsonifyable( self, item ): # TODO: use galaxy's override - self.assertIsInstance( json.dumps( item ), basestring ) + self.assertIsInstance( json.dumps( item ), string_types ) class CreatesCollectionsMixin( object ): diff --git a/tox.ini b/tox.ini index 65af9bda1f39..a0a82bbb322e 100644 --- a/tox.ini +++ b/tox.ini @@ -1,23 +1,27 @@ [tox] -envlist = py27-lint, py27-unit, qunit, mako-count, web-controller-line-count, py34-lint, py35-lint +envlist = py27-lint, py27-unit, qunit, mako-count, web-controller-line-count, py33-lint, py34-lint, py35-lint skipsdist = True [testenv:py27-lint] commands = bash .ci/flake8_wrapper.sh whitelist_externals = bash -deps = flake8 +deps = flake8==2.6.0 + +[testenv:py33-lint] +commands = bash .ci/flake8_py3_wrapper.sh +whitelist_externals = bash +deps = flake8==2.6.0 [testenv:py34-lint] commands = bash .ci/flake8_py3_wrapper.sh whitelist_externals = bash -deps = flake8 +deps = flake8==2.6.0 [testenv:py35-lint] commands = bash .ci/flake8_py3_wrapper.sh whitelist_externals = bash -deps = flake8 - +deps = flake8==2.6.0 [testenv:py27-unit] commands = bash run_tests.sh --no-create-venv -u From dd931f9691f2599b4c6642015c95bc10a7f6590a Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 16:39:25 -0400 Subject: [PATCH 16/86] Python3: test/unit/test_galaxy_mapping.py --- .ci/py3_sources.txt | 1 + test/unit/test_galaxy_mapping.py | 18 ++++++++++-------- 2 files 
changed, 11 insertions(+), 8 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 9fbf4113f082..8541ee999c6c 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -53,6 +53,7 @@ test/functional/ test/integration/ test/manual/ test/unit/managers/base.py +test/unit/test_galaxy_mapping.py test/unit/tools/test_actions.py test/unit/workflows/test_run_parameters.py tool_list.py diff --git a/test/unit/test_galaxy_mapping.py b/test/unit/test_galaxy_mapping.py index cd56cc831ebe..e3a49097ebb3 100644 --- a/test/unit/test_galaxy_mapping.py +++ b/test/unit/test_galaxy_mapping.py @@ -2,6 +2,7 @@ import unittest import galaxy.model.mapping as mapping import uuid +from six import text_type class MappingTests( unittest.TestCase ): @@ -16,7 +17,7 @@ def persist_and_check_annotation( annotation_class, **kwds ): annotated_association = annotation_class() annotated_association.annotation = "Test Annotation" annotated_association.user = u - for key, value in kwds.iteritems(): + for key, value in kwds.items(): setattr(annotated_association, key, value) self.persist( annotated_association ) self.expunge() @@ -75,7 +76,7 @@ def persist_and_check_rating( rating_class, **kwds ): rating_association = rating_class() rating_association.rating = 5 rating_association.user = u - for key, value in kwds.iteritems(): + for key, value in kwds.items(): setattr(rating_association, key, value) self.persist( rating_association ) self.expunge() @@ -118,8 +119,8 @@ def persist_and_check_rating( rating_class, **kwds ): def test_display_name( self ): def assert_display_name_converts_to_unicode( item, name ): - assert not isinstance( item.name, unicode ) - assert isinstance( item.get_display_name(), unicode ) + assert not isinstance( item.name, text_type ) + assert isinstance( item.get_display_name(), text_type ) assert item.get_display_name() == name ldda = self.model.LibraryDatasetDatasetAssociation( name='ldda_name' ) @@ -140,8 +141,9 @@ def assert_display_name_converts_to_unicode( item, name ): history = self.model.History( name=u'Hello₩◎ґʟⅾ' ) - assert isinstance( history.name, unicode ) - assert isinstance( history.get_display_name(), unicode ) + + assert isinstance( history.name, text_type ) + assert isinstance( history.get_display_name(), text_type ) assert history.get_display_name() == u'Hello₩◎ґʟⅾ' def test_tags( self ): @@ -203,7 +205,7 @@ def test_collections_in_histories(self): self.persist( u, h1, d1, d2, c1, hc1, dce1, dce2 ) loaded_dataset_collection = self.query( model.HistoryDatasetCollectionAssociation ).filter( model.HistoryDatasetCollectionAssociation.name == "HistoryCollectionTest1" ).first().collection - self.assertEquals(len(loaded_dataset_collection.elements), 2) + self.assertEqual(len(loaded_dataset_collection.elements), 2) assert loaded_dataset_collection.collection_type == "pair" assert loaded_dataset_collection[ "left" ] == dce1 assert loaded_dataset_collection[ "right" ] == dce2 @@ -356,7 +358,7 @@ def contents_iter_names(**kwds): ).first() return list( map( lambda hda: hda.name, history.contents_iter( **kwds ) ) ) - self.assertEquals(contents_iter_names(), [ "1", "2", "3", "4" ]) + self.assertEqual(contents_iter_names(), [ "1", "2", "3", "4" ]) assert contents_iter_names( deleted=False ) == [ "1", "2" ] assert contents_iter_names( visible=True ) == [ "1", "3" ] assert contents_iter_names( visible=False ) == [ "2", "4" ] From dba8d0bb0a717427a9f3647f9f32935f60c5f063 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 17:07:59 -0400 Subject: [PATCH 17/86] 
Python3: tools/stats/aggregate_scores_in_intervals.py --- .ci/py3_sources.txt | 1 + tools/stats/aggregate_scores_in_intervals.py | 14 +++++++------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 8541ee999c6c..a7f86fbcca36 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -60,4 +60,5 @@ tool_list.py tools/data_source/ tools/evolution/ tools/sr_mapping/ +tools/stats/aggregate_scores_in_intervals.py tools/visualization/ diff --git a/tools/stats/aggregate_scores_in_intervals.py b/tools/stats/aggregate_scores_in_intervals.py index 77e8ca7c2278..9e18d8776b2d 100755 --- a/tools/stats/aggregate_scores_in_intervals.py +++ b/tools/stats/aggregate_scores_in_intervals.py @@ -7,7 +7,7 @@ -c, --chrom_buffer=INT: number of chromosomes (default is 3) to keep in memory when using a user supplied score file """ -from __future__ import division +from __future__ import division, print_function import os import os.path @@ -63,7 +63,7 @@ def __len__( self ): def __repr__( self ): i = 0 repr = "[ " - for i in xrange( self.length ): + for i in range( self.length ): repr = "%s %s," % ( repr, self[i] ) return "%s ]" % ( repr ) @@ -113,7 +113,7 @@ def load_scores_wiggle( fname, chrom_buffer_size=3 ): scores_by_chrom[chrom][pos] = val except UCSCLimitException: # Wiggle data was truncated, at the very least need to warn the user. - print 'Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.' + print('Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.') except IndexError: stop_err('Data error: one or more column data values is missing in "%s"' % fname) except ValueError: @@ -230,7 +230,7 @@ def main(): out_line.append(min_score) out_line.append(max_score) - print >> out_file, "\t".join( map( str, out_line ) ) + print("\t".join( map( str, out_line ) ), file=out_file) else: skipped_lines += 1 if not invalid_line: @@ -238,14 +238,14 @@ def main(): invalid_line = line elif line.startswith( '#' ): # We'll save the original comments - print >> out_file, line + print(line, file=out_file) out_file.close() if skipped_lines > 0: - print 'Data issue: skipped %d invalid lines starting at line #%d which is "%s"' % ( skipped_lines, first_invalid_line, invalid_line ) + print('Data issue: skipped %d invalid lines starting at line #%d which is "%s"' % ( skipped_lines, first_invalid_line, invalid_line )) if skipped_lines == i: - print 'Consider changing the metadata for the input dataset by clicking on the pencil icon in the history item.' 
+ print('Consider changing the metadata for the input dataset by clicking on the pencil icon in the history item.') if __name__ == "__main__": main() From 17fc7ac5bc8b3a2f973fd2ed8337682223d514c6 Mon Sep 17 00:00:00 2001 From: Xiaoqian Jiang Date: Sat, 25 Jun 2016 17:24:10 -0400 Subject: [PATCH 18/86] Python3: test/unit/managers/test_DatasetManager.py --- .ci/py3_sources.txt | 1 + test/unit/managers/test_DatasetManager.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index a7f86fbcca36..87ad7ea6ad64 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -49,6 +49,7 @@ scripts/cleanup_datasets/cleanup_datasets.py test/api/test_workflows_from_yaml.py test/base/ test/casperjs/ +test/unit/managers/test_DatasetManager.py test/functional/ test/integration/ test/manual/ diff --git a/test/unit/managers/test_DatasetManager.py b/test/unit/managers/test_DatasetManager.py index 4edf69cc19b0..cbbb93e2d9fe 100644 --- a/test/unit/managers/test_DatasetManager.py +++ b/test/unit/managers/test_DatasetManager.py @@ -9,11 +9,12 @@ os.path.join( os.path.dirname( __file__), '../unittest_utils/utility.py' ) ) import sqlalchemy +from six import string_types from galaxy import model from galaxy import exceptions -from base import BaseTestCase +from .base import BaseTestCase from galaxy.managers.base import SkipAttribute from galaxy.managers.roles import RoleManager @@ -271,7 +272,7 @@ def test_serializers( self ): self.assertDate( serialized[ 'update_time' ] ) self.assertUUID( serialized[ 'uuid' ] ) - self.assertIsInstance( serialized[ 'state' ], basestring ) + self.assertIsInstance( serialized[ 'state' ], string_types ) self.assertIsInstance( serialized[ 'deleted' ], bool ) self.assertIsInstance( serialized[ 'purged' ], bool ) self.assertIsInstance( serialized[ 'purgable' ], bool ) From 2fd84dd4995c03f966dbdd1df26dc984e3bbd975 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 17:26:53 -0400 Subject: [PATCH 19/86] Python3: lib/galaxy/datatypes/converters/interval_to_coverage.py --- .ci/py3_sources.txt | 1 + .../converters/interval_to_coverage.py | 18 +++++++++--------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index a7f86fbcca36..0831f7fa0ab4 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -3,6 +3,7 @@ cron/ lib/galaxy/actions/ lib/galaxy/auth/ lib/galaxy/config.py +lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/dependencies/ lib/galaxy/eggs/ lib/galaxy/exceptions/ diff --git a/lib/galaxy/datatypes/converters/interval_to_coverage.py b/lib/galaxy/datatypes/converters/interval_to_coverage.py index 1d367ccb5fbc..11727cc4f622 100644 --- a/lib/galaxy/datatypes/converters/interval_to_coverage.py +++ b/lib/galaxy/datatypes/converters/interval_to_coverage.py @@ -6,7 +6,7 @@ -1, --cols1=N,N,N,N: Columns for chrom, start, end, strand in interval file -2, --cols2=N,N,N,N: Columns for chrom, start, end, strand in coverage file """ -import commands +import subprocess import tempfile from bisect import bisect from os import environ @@ -42,11 +42,11 @@ def main( interval, coverage ): for record in interval: chrom = record.chrom if lastchrom and not lastchrom == chrom and partitions: - for partition in xrange(0, len(partitions) - 1): + for partition in range(0, len(partitions) - 1): forward = forward_covs[partition] reverse = reverse_covs[partition] if forward + reverse > 0: - coverage.write(chrom=chrom, 
position=xrange(partitions[partition], partitions[partition + 1]), + coverage.write(chrom=chrom, position=range(partitions[partition], partitions[partition + 1]), forward=forward, reverse=reverse) partitions = [] forward_covs = [] @@ -64,7 +64,7 @@ def main( interval, coverage ): forward_covs.insert(start_index, forward_base) reverse_covs.insert(start_index, reverse_base) end_index = bisect(partitions, record.end) - for index in xrange(start_index, end_index): + for index in range(start_index, end_index): forward_covs[index] += forward reverse_covs[index] += reverse partitions.insert(end_index, record.end) @@ -72,11 +72,11 @@ def main( interval, coverage ): reverse_covs.insert(end_index, reverse_covs[end_index - 1] - reverse ) if partitions: - for partition in xrange(0, start_index): + for partition in range(0, start_index): forward = forward_covs[partition] reverse = reverse_covs[partition] if forward + reverse > 0: - coverage.write(chrom=chrom, position=xrange(partitions[partition], partitions[partition + 1]), + coverage.write(chrom=chrom, position=range(partitions[partition], partitions[partition + 1]), forward=forward, reverse=reverse) partitions = partitions[start_index:] forward_covs = forward_covs[start_index:] @@ -86,11 +86,11 @@ def main( interval, coverage ): # Finish the last chromosome if partitions: - for partition in xrange(0, len(partitions) - 1): + for partition in range(0, len(partitions) - 1): forward = forward_covs[partition] reverse = reverse_covs[partition] if forward + reverse > 0: - coverage.write(chrom=chrom, position=xrange(partitions[partition], partitions[partition + 1]), + coverage.write(chrom=chrom, position=range(partitions[partition], partitions[partition + 1]), forward=forward, reverse=reverse) @@ -133,7 +133,7 @@ def close(self): temp_file = tempfile.NamedTemporaryFile(mode="r") environ['LC_ALL'] = 'POSIX' commandline = "sort -f -n -k %d -k %d -k %d -o %s %s" % (chr_col_1 + 1, start_col_1 + 1, end_col_1 + 1, temp_file.name, in_fname) - errorcode, stdout = commands.getstatusoutput(commandline) + errorcode, stdout = subprocess.getstatusoutput(commandline) coverage = CoverageWriter( out_stream=open(out_fname, "a"), chromCol=chr_col_2, positionCol=position_col_2, From 7d7396f18f65e393ff330cdb9f509fd7036eb804 Mon Sep 17 00:00:00 2001 From: Junzhou Wang Date: Sat, 25 Jun 2016 17:13:13 -0400 Subject: [PATCH 20/86] Python3: test/api/ --- .ci/py3_sources.txt | 5 ++ test/api/helpers.py | 10 ++-- test/api/test_datasets.py | 5 +- test/api/test_tool_data.py | 8 ++-- test/api/test_workflow_extraction.py | 25 +++++----- test/api/test_workflows.py | 68 ++++++++++++++-------------- 6 files changed, 64 insertions(+), 57 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 499859e52933..52cc6846b354 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -64,3 +64,8 @@ tools/evolution/ tools/sr_mapping/ tools/stats/aggregate_scores_in_intervals.py tools/visualization/ +test/api/helpers.py +test/api/test_datasets.py +test/api/test_tool_data.py +test/api/test_workflow_extraction.py +test/api/test_workflows.py diff --git a/test/api/helpers.py b/test/api/helpers.py index 02ea2717bce0..2a6b0c2537cb 100644 --- a/test/api/helpers.py +++ b/test/api/helpers.py @@ -3,7 +3,7 @@ import time import json -import StringIO +from six import StringIO from pkg_resources import resource_string # Simple workflow that takes an input and call cat wrapper on it. 
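# Illustrative aside (an editorial sketch, not part of the patch): Python 3's
# map() and filter() return lazy iterators rather than lists, which is why the
# hunk below replaces map() with a list comprehension; a toy `tools` list
# stands in here for the tools API response.
from operator import itemgetter
tools = [{"id": "cat1"}, {"id": "upload1"}]
assert list(map(itemgetter("id"), tools)) == ["cat1", "upload1"]
assert [t["id"] for t in tools] == ["cat1", "upload1"]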
@@ -27,7 +27,7 @@ def get_tool_ids( api_test_case ): index = api_test_case.galaxy_interactor.get( "tools", data=dict(in_panel=False) ) tools = index.json() # In panels by default, so flatten out sections... - tool_ids = map( itemgetter( "id" ), tools ) + tool_ids = [itemgetter( "id" )(_) for _ in tools] return tool_ids def wrapped_method( api_test_case, *args, **kwargs ): @@ -110,7 +110,7 @@ def upload_payload( self, history_id, content, **kwds ): 'dbkey': dbkey, 'file_type': file_type, } - if isinstance( content, file ): + if hasattr(content, 'read'): upload_params[ "files_0|file_data"] = content else: upload_params[ 'files_0|url_paste' ] = content @@ -329,7 +329,7 @@ def create_dataset_request( self, library, **kwds ): "db_key": kwds.get( "db_key", "?" ), } files = { - "files_0|file_data": kwds.get( "file", StringIO.StringIO( kwds.get( "contents", "TestData" ) ) ), + "files_0|file_data": kwds.get( "file", StringIO( kwds.get( "contents", "TestData" ) ) ), } return create_data, files @@ -438,7 +438,7 @@ def __create( self, payload ): def __datasets( self, history_id, count, contents=None ): datasets = [] - for i in xrange( count ): + for i in range( count ): new_kwds = {} if contents: new_kwds[ "content" ] = contents[ i ] diff --git a/test/api/test_datasets.py b/test/api/test_datasets.py index 9aac049033ca..e777d830bac8 100644 --- a/test/api/test_datasets.py +++ b/test/api/test_datasets.py @@ -1,3 +1,4 @@ +from __future__ import print_function import textwrap from base import api @@ -12,8 +13,8 @@ def setUp( self ): def test_index( self ): index_response = self._get( "datasets" ) - print index_response - print dir( index_response ) + print(index_response) + print(dir( index_response )) self._assert_status_code_is( index_response, 501 ) def test_show( self ): diff --git a/test/api/test_tool_data.py b/test/api/test_tool_data.py index d12d3222241c..0a2408e6f8d9 100644 --- a/test/api/test_tool_data.py +++ b/test/api/test_tool_data.py @@ -1,7 +1,7 @@ """ Tests for the tool data API. 
""" +from __future__ import print_function from base import api - import operator @@ -14,14 +14,14 @@ def test_admin_only( self ): def test_list(self): index_response = self._get( "tool_data", admin=True ) self._assert_status_code_is( index_response, 200 ) - print index_response.content + print(index_response.content) index = index_response.json() - assert "testalpha" in map(operator.itemgetter("name"), index) + assert "testalpha" in [operator.itemgetter("name")(_) for _ in index] def test_show(self): show_response = self._get( "tool_data/testalpha", admin=True ) self._assert_status_code_is( show_response, 200 ) - print show_response.content + print(show_response.content) data_table = show_response.json() assert data_table["columns"] == ["value", "name", "path"] first_entry = data_table["fields"][0] diff --git a/test/api/test_workflow_extraction.py b/test/api/test_workflow_extraction.py index 331419d0e15c..47f0ab009eb7 100644 --- a/test/api/test_workflow_extraction.py +++ b/test/api/test_workflow_extraction.py @@ -1,3 +1,4 @@ +from __future__ import print_function from collections import namedtuple import functools from json import dumps, loads @@ -18,12 +19,12 @@ def test_extract_from_history( self ): # Run the simple test workflow and extract it back out from history cat1_job_id = self.__setup_and_run_cat1_workflow( history_id=self.history_id ) contents = self._history_contents() - input_hids = map( lambda c: c[ "hid" ], contents[ 0:2 ] ) + input_hids = [c[ "hid" ] for c in contents[ 0:2 ]] downloaded_workflow = self._extract_and_download_workflow( dataset_ids=input_hids, job_ids=[ cat1_job_id ], ) - self.assertEquals( downloaded_workflow[ "name" ], "test import from history" ) + self.assertEqual( downloaded_workflow[ "name" ], "test import from history" ) self.__assert_looks_like_cat1_example_workflow( downloaded_workflow ) def test_extract_with_copied_inputs( self ): @@ -42,7 +43,7 @@ def test_extract_with_copied_inputs( self ): for old_dataset in old_contents: self.__copy_content_to_history( self.history_id, old_dataset ) new_contents = self._history_contents() - input_hids = map( lambda c: c[ "hid" ], new_contents[ (offset + 0):(offset + 2) ] ) + input_hids = [c[ "hid" ] for c in new_contents[ (offset + 0):(offset + 2) ]] cat1_job_id = self.__job_id( self.history_id, new_contents[ (offset + 2) ][ "id" ] ) downloaded_workflow = self._extract_and_download_workflow( dataset_ids=input_hids, @@ -141,7 +142,7 @@ def test_extract_workflows_with_dataset_collections( self ): collection_step = self._get_steps_of_type( downloaded_workflow, "data_collection_input", expected_len=1 )[ 0 ] collection_step_state = loads( collection_step[ "tool_state" ] ) - self.assertEquals( collection_step_state[ "collection_type" ], u"paired" ) + self.assertEqual( collection_step_state[ "collection_type" ], "paired" ) @skip_without_tool( "cat_collection" ) def test_subcollection_mapping( self ): @@ -169,7 +170,7 @@ def test_subcollection_mapping( self ): dataset_collection_ids=[ jobs_summary.inputs["text_input1"]["hid"] ], job_ids=[ job1_id, job2_id ], ) - print jobs_summary.inputs["text_input1"] + print(jobs_summary.inputs["text_input1"]) self.__check_workflow( downloaded_workflow, step_count=3, @@ -181,7 +182,7 @@ def test_subcollection_mapping( self ): collection_step = self._get_steps_of_type( downloaded_workflow, "data_collection_input", expected_len=1 )[ 0 ] collection_step_state = loads( collection_step[ "tool_state" ] ) - self.assertEquals( collection_step_state[ "collection_type" ], u"list:paired" ) + 
self.assertEqual( collection_step_state[ "collection_type" ], "list:paired" ) @skip_without_tool( "collection_split_on_column" ) def test_extract_workflow_with_output_collections( self ): @@ -214,7 +215,7 @@ def test_extract_workflow_with_output_collections( self ): text_input2: "samp1\t30.0\nsamp2\t40.0\n" """) tool_ids = [ "cat1", "collection_split_on_column", "cat_list" ] - job_ids = map( functools.partial(self._job_id_for_tool, jobs_summary.jobs ), tool_ids ) + job_ids = list(map( functools.partial(self._job_id_for_tool, jobs_summary.jobs ), tool_ids )) downloaded_workflow = self._extract_and_download_workflow( dataset_ids=[ "1", "2" ], job_ids=job_ids, @@ -264,7 +265,7 @@ def test_extract_with_mapped_output_collections( self ): content: "samp1\t30.0\nsamp2\t40.0\n" """) tool_ids = [ "cat1", "collection_creates_pair", "cat_collection", "cat_list" ] - job_ids = map( functools.partial(self._job_id_for_tool, jobs_summary.jobs ), tool_ids ) + job_ids = list(map( functools.partial(self._job_id_for_tool, jobs_summary.jobs ), tool_ids )) downloaded_workflow = self._extract_and_download_workflow( dataset_collection_ids=[ "3" ], job_ids=job_ids, @@ -282,7 +283,7 @@ def _job_id_for_tool( self, jobs, tool_id ): return self._job_for_tool( jobs, tool_id )[ "id" ] def _job_for_tool( self, jobs, tool_id ): - tool_jobs = filter( lambda j: j["tool_id"] == tool_id, jobs ) + tool_jobs = [j for j in jobs if j["tool_id"] == tool_id] if not tool_jobs: assert False, "Failed to find job for tool %s" % tool_id # if len( tool_jobs ) > 1: @@ -330,8 +331,8 @@ def __assert_looks_like_cat1_example_workflow( self, downloaded_workflow ): input1 = tool_step[ "input_connections" ][ "input1" ] input2 = tool_step[ "input_connections" ][ "queries_0|input2" ] - self.assertEquals( input_steps[ 0 ][ "id" ], input1[ "id" ] ) - self.assertEquals( input_steps[ 1 ][ "id" ], input2[ "id" ] ) + self.assertEqual( input_steps[ 0 ][ "id" ], input1[ "id" ] ) + self.assertEqual( input_steps[ 1 ][ "id" ], input2[ "id" ] ) def _history_contents( self, history_id=None ): if history_id is None: @@ -368,7 +369,7 @@ def _assert_first_step_is_paired_input( self, downloaded_workflow ): collection_steps = self._get_steps_of_type( downloaded_workflow, "data_collection_input", expected_len=1 ) collection_step = collection_steps[ 0 ] collection_step_state = loads( collection_step[ "tool_state" ] ) - self.assertEquals( collection_step_state[ "collection_type" ], u"paired" ) + self.assertEqual( collection_step_state[ "collection_type" ], "paired" ) collect_step_idx = collection_step[ "id" ] return collect_step_idx diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py index 15362796218f..ecf2caa6779f 100644 --- a/test/api/test_workflows.py +++ b/test/api/test_workflows.py @@ -81,7 +81,7 @@ def _assert_user_has_workflow_with_name( self, name ): def _workflow_names( self ): index_response = self._get( "workflows" ) self._assert_status_code_is( index_response, 200 ) - names = map( lambda w: w[ "name" ], index_response.json() ) + names = [w[ "name" ] for w in index_response.json()] return names # Import importer interface... 
@@ -140,7 +140,7 @@ def _setup_workflow_run( self, workflow, inputs_by='step_id', history_id=None ): def _build_ds_map( self, workflow_id, label_map ): workflow_inputs = self._workflow_inputs( workflow_id ) ds_map = {} - for key, value in workflow_inputs.iteritems(): + for key, value in workflow_inputs.items(): label = value[ "label" ] if label in label_map: ds_map[ key ] = label_map[ label ] @@ -275,7 +275,7 @@ def test_show_valid( self ): workflow = show_response.json() self._assert_looks_like_instance_workflow_representation( workflow ) assert len(workflow["steps"]) == 3 - self.assertEquals(sorted([step["id"] for step in workflow["steps"].values()]), [0, 1, 2]) + self.assertEqual(sorted([step["id"] for step in workflow["steps"].values()]), [0, 1, 2]) show_response = self._get( "workflows/%s" % workflow_id, {"legacy": True} ) workflow = show_response.json() @@ -283,7 +283,7 @@ def test_show_valid( self ): assert len(workflow["steps"]) == 3 # Can't reay say what the legacy IDs are but must be greater than 3 because dummy # workflow was created first in this instance. - self.assertNotEquals(sorted([step["id"] for step in workflow["steps"].values()]), [0, 1, 2]) + self.assertNotEqual(sorted([step["id"] for step in workflow["steps"].values()]), [0, 1, 2]) def test_show_invalid_key_is_400( self ): show_response = self._get( "workflows/%s" % self._random_key() ) @@ -344,7 +344,7 @@ def test_update( self ): uuids = {} labels = {} - for order_index, step_dict in original_workflow["steps"].iteritems(): + for order_index, step_dict in original_workflow["steps"].items(): uuid = str(uuid4()) step_dict["uuid"] = uuid uuids[order_index] = uuid @@ -356,8 +356,8 @@ def check_label_and_uuid(order_index, step_dict): assert order_index in uuids assert order_index in labels - self.assertEquals(uuids[order_index], step_dict["uuid"]) - self.assertEquals(labels[order_index], step_dict["label"]) + self.assertEqual(uuids[order_index], step_dict["uuid"]) + self.assertEqual(labels[order_index], step_dict["label"]) upload_response = self.__test_upload( workflow=original_workflow ) workflow_id = upload_response.json()["id"] @@ -377,7 +377,7 @@ def tweak_step(step): assert step_dict['position']['left'] != 1 step_dict['position'] = {'top': 1, 'left': 1} - map(tweak_step, steps.iteritems()) + list(map(tweak_step, iter(steps.items()))) update(workflow_content) @@ -388,7 +388,7 @@ def check_step(step): assert step_dict['position']['left'] == 1 updated_workflow_content = self._download_workflow(workflow_id) - map(check_step, updated_workflow_content['steps'].iteritems()) + list(map(check_step, iter(updated_workflow_content['steps'].items()))) # Re-update against original worklfow... update(original_workflow) @@ -396,7 +396,7 @@ def check_step(step): updated_workflow_content = self._download_workflow(workflow_id) # Make sure the positions have been updated. 
- map(tweak_step, updated_workflow_content['steps'].iteritems()) + list(map(tweak_step, iter(updated_workflow_content['steps'].items()))) def test_update_no_tool_id( self ): workflow_object = self.workflow_populator.load_workflow( name="test_import" ) @@ -448,7 +448,7 @@ def test_import_annotations( self ): other_id = other_import_response.json()["id"] imported_workflow = self._show_workflow( other_id ) assert imported_workflow["annotation"] == "simple workflow" - step_annotations = set(map(lambda step: step["annotation"], imported_workflow["steps"].values())) + step_annotations = set([step["annotation"] for step in imported_workflow["steps"].values()]) assert "input1 description" in step_annotations def test_import_subworkflows( self ): @@ -536,7 +536,7 @@ def test_import_missing_tool( self ): workflow_id = self.workflow_populator.create_workflow( workflow ) workflow_description = self._show_workflow( workflow_id ) steps = workflow_description["steps"] - missing_tool_steps = filter(lambda v: v['tool_id'] == 'cat_missing_tool', steps.values()) + missing_tool_steps = [v for v in steps.values() if v['tool_id'] == 'cat_missing_tool'] assert len(missing_tool_steps) == 1 def test_import_no_tool_id( self ): @@ -642,7 +642,7 @@ def test_workflow_run_output_collections(self): self.__invoke_workflow( history_id, workflow_id, inputs ) time.sleep( 5 ) self.dataset_populator.wait_for_history( history_id, assert_ok=True ) - self.assertEquals("a\nc\nb\nd\n", self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) ) + self.assertEqual("a\nc\nb\nd\n", self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) ) @skip_without_tool( "collection_creates_pair" ) def test_workflow_run_output_collection_mapping(self): @@ -672,7 +672,7 @@ def test_workflow_run_output_collection_mapping(self): self.__invoke_workflow( history_id, workflow_id, inputs ) time.sleep( 5 ) self.dataset_populator.wait_for_history( history_id, assert_ok=True ) - self.assertEquals("a\nc\nb\nd\ne\ng\nf\nh\n", self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) ) + self.assertEqual("a\nc\nb\nd\ne\ng\nf\nh\n", self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) ) @skip_without_tool( "collection_split_on_column" ) def test_workflow_run_dynamic_output_collections(self): @@ -715,7 +715,7 @@ def test_workflow_run_dynamic_output_collections(self): last_item_hid = details["hid"] assert last_item_hid == 7, "Expected 7 history items, got %s" % last_item_hid content = self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) - self.assertEquals("10.0\n30.0\n20.0\n40.0\n", content ) + self.assertEqual("10.0\n30.0\n20.0\n40.0\n", content ) @skip_without_tool( "collection_split_on_column" ) @skip_without_tool( "min_repeat" ) @@ -759,7 +759,7 @@ def test_workflow_run_dynamic_output_collections_2( self ): invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs ) self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id ) content = self.dataset_populator.get_history_dataset_content( history_id, hid=7 ) - self.assertEquals(content.strip(), "samp1\t10.0\nsamp2\t20.0") + self.assertEqual(content.strip(), "samp1\t10.0\nsamp2\t20.0") @skip_without_tool( "mapper" ) @skip_without_tool( "pileup" ) @@ -808,7 +808,7 @@ def test_run_subworkflow_simple( self ): self._run_jobs(SIMPLE_NESTED_WORKFLOW_YAML, history_id=history_id) content = self.dataset_populator.get_history_dataset_content( history_id ) - 
self.assertEquals("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\nchr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content) + self.assertEqual("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\nchr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content) @skip_without_tool( "cat1" ) @skip_without_tool( "collection_paired_test" ) @@ -851,7 +851,7 @@ def test_workflow_run_zip_collections( self ): invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs ) self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id ) content = self.dataset_populator.get_history_dataset_content( history_id ) - self.assertEquals(content.strip(), "samp1\t10.0\nsamp2\t20.0\nsamp1\t20.0\nsamp2\t40.0") + self.assertEqual(content.strip(), "samp1\t10.0\nsamp2\t20.0\nsamp1\t20.0\nsamp2\t40.0") def test_workflow_request( self ): workflow = self.workflow_populator.load_workflow( name="test_for_queue" ) @@ -943,7 +943,7 @@ def test_workflow_map_reduce_pause( self ): self.dataset_populator.wait_for_history( history_id, assert_ok=True ) invocation = self._invocation_details( uploaded_workflow_id, invocation_id ) assert invocation[ 'state' ] == 'scheduled' - self.assertEquals("reviewed\n1\nreviewed\n4\n", self.dataset_populator.get_history_dataset_content( history_id ) ) + self.assertEqual("reviewed\n1\nreviewed\n4\n", self.dataset_populator.get_history_dataset_content( history_id ) ) @skip_without_tool( "cat" ) def test_cancel_workflow_invocation( self ): @@ -1097,7 +1097,7 @@ def test_run_with_text_connection( self ): self.dataset_populator.wait_for_history( history_id, assert_ok=True ) content = self.dataset_populator.get_history_dataset_content( history_id ) - self.assertEquals("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content) + self.assertEqual("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content) def wait_for_invocation_and_jobs( self, history_id, workflow_id, invocation_id, assert_ok=True ): self.workflow_populator.wait_for_invocation( workflow_id, invocation_id ) @@ -1146,7 +1146,7 @@ def test_workflow_run_with_matching_lists( self ): run_workflow_response = self._post( "workflows", data=workflow_request ) self._assert_status_code_is( run_workflow_response, 200 ) self.dataset_populator.wait_for_history( history_id, assert_ok=True ) - self.assertEquals("1 2 3\n4 5 6\n7 8 9\n0 a b\n", self.dataset_populator.get_history_dataset_content( history_id ) ) + self.assertEqual("1 2 3\n4 5 6\n7 8 9\n0 a b\n", self.dataset_populator.get_history_dataset_content( history_id ) ) def test_workflow_stability( self ): # Run this index stability test with following command: @@ -1167,7 +1167,7 @@ def test_workflow_stability( self ): def _step_map(self, workflow): # Build dict mapping 'tep index to input name. step_map = {} - for step_index, step in workflow["steps"].iteritems(): + for step_index, step in workflow["steps"].items(): if step[ "type" ] == "data_input": step_map[step_index] = step["inputs"][0]["name"] return step_map @@ -1226,7 +1226,7 @@ def test_run_with_runtime_pja( self ): # would be added to the original workflow post job actions. 
workflow_id = workflow_request["workflow_id"] downloaded_workflow = self._download_workflow( workflow_id ) - pjas = downloaded_workflow[ "steps" ][ "2" ][ "post_job_actions" ].values() + pjas = list(downloaded_workflow[ "steps" ][ "2" ][ "post_job_actions" ].values()) assert len( pjas ) == 0, len( pjas ) @skip_without_tool( "cat1" ) @@ -1253,15 +1253,15 @@ def test_run_with_delayed_runtime_pja( self ): $link: the_pause """) downloaded_workflow = self._download_workflow( workflow_id ) - print downloaded_workflow - uuid_dict = dict( map( lambda (index, step): ( int( index ), step["uuid"] ), downloaded_workflow["steps"].iteritems() ) ) + print(downloaded_workflow) + uuid_dict = dict( [( int( index_step[0] ), index_step[1]["uuid"] ) for index_step in iter(downloaded_workflow["steps"].items())] ) history_id = self.dataset_populator.new_history() hda = self.dataset_populator.new_dataset( history_id, content="1 2 3" ) self.dataset_populator.wait_for_history( history_id ) inputs = { '0': self._ds_entry( hda ), } - print inputs + print(inputs) uuid2 = uuid_dict[ 3 ] workflow_request = {} workflow_request[ "replacement_params" ] = dumps( dict( replaceme="was replaced" ) ) @@ -1327,7 +1327,7 @@ def test_delete_intermediate_datasets_pja_1( self ): # I think hda3 should be deleted, but the inputs to # steps with workflow outputs are not deleted. # assert hda3["deleted"] - print hda3["deleted"] + print(hda3["deleted"]) assert not hda4["deleted"] @skip_without_tool( "random_lines1" ) @@ -1361,7 +1361,7 @@ def test_parameter_substitution_sanitization( self ): run_workflow_response, history_id = self._run_validation_workflow_with_substitions( substitions ) self.dataset_populator.wait_for_history( history_id, assert_ok=True ) - self.assertEquals("__dq__ X echo __dq__moo\n", self.dataset_populator.get_history_dataset_content( history_id, hid=1 ) ) + self.assertEqual("__dq__ X echo __dq__moo\n", self.dataset_populator.get_history_dataset_content( history_id, hid=1 ) ) @skip_without_tool( "validation_repeat" ) def test_parameter_substitution_validation_value_errors_0( self ): @@ -1439,14 +1439,14 @@ def test_run_replace_params_nested( self ): run_workflow_response = self._post( "workflows", data=workflow_request ) self._assert_status_code_is( run_workflow_response, 200 ) self.dataset_populator.wait_for_history( history_id, assert_ok=True ) - self.assertEquals("3\n", self.dataset_populator.get_history_dataset_content( history_id ) ) + self.assertEqual("3\n", self.dataset_populator.get_history_dataset_content( history_id ) ) def test_pja_import_export( self ): workflow = self.workflow_populator.load_workflow( name="test_for_pja_import", add_pja=True ) uploaded_workflow_id = self.workflow_populator.create_workflow( workflow ) downloaded_workflow = self._download_workflow( uploaded_workflow_id ) self._assert_has_keys( downloaded_workflow[ "steps" ], "0", "1", "2" ) - pjas = downloaded_workflow[ "steps" ][ "2" ][ "post_job_actions" ].values() + pjas = list(downloaded_workflow[ "steps" ][ "2" ][ "post_job_actions" ].values()) assert len( pjas ) == 1, len( pjas ) pja = pjas[ 0 ] self._assert_has_keys( pja, "action_type", "output_name", "action_arguments" ) @@ -1523,15 +1523,15 @@ def _random_lines_steps( self, workflow_request ): workflow_summary_response = self._get( "workflows/%s" % workflow_request[ "workflow_id" ] ) self._assert_status_code_is( workflow_summary_response, 200 ) steps = workflow_summary_response.json()[ "steps" ] - return sorted( filter(lambda step: step["tool_id"] == "random_lines1", 
steps.values()), key=lambda step: step["id"] ) + return sorted( [step for step in list(steps.values()) if step["tool_id"] == "random_lines1"], key=lambda step: step["id"] ) def _setup_random_x2_workflow( self, name ): workflow = self.workflow_populator.load_random_x2_workflow( name ) uploaded_workflow_id = self.workflow_populator.create_workflow( workflow ) workflow_inputs = self._workflow_inputs( uploaded_workflow_id ) - key = workflow_inputs.keys()[ 0 ] + key = list(workflow_inputs.keys())[ 0 ] history_id = self.dataset_populator.new_history() - ten_lines = "\n".join( map( str, range( 10 ) ) ) + ten_lines = "\n".join( map( str, list(range( 10)) ) ) hda1 = self.dataset_populator.new_dataset( history_id, content=ten_lines ) workflow_request = dict( history="hist_id=%s" % history_id, @@ -1558,7 +1558,7 @@ def __assert_lines_hid_line_count_is( self, history, hid, lines ): hda_summary = filter( lambda hc: hc[ "hid" ] == hid, history_contents_response.json() )[ 0 ] hda_info_response = self._get( "%s/%s" % ( contents_url, hda_summary[ "id" ] ) ) self._assert_status_code_is( hda_info_response, 200 ) - self.assertEquals( hda_info_response.json()[ "metadata_data_lines" ], lines ) + self.assertEqual( hda_info_response.json()[ "metadata_data_lines" ], lines ) def __invoke_workflow( self, history_id, workflow_id, inputs={}, request={}, assert_ok=True ): request["history"] = "hist_id=%s" % history_id, From 5939c5fc2666ecd649ab033c23d432f6572eb7b1 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 18:58:37 -0400 Subject: [PATCH 21/86] Python3: lib/galaxy/dataset_collections/ and lib/galaxy/security/ --- .ci/py3_sources.txt | 2 ++ lib/galaxy/dataset_collections/structure.py | 4 ++-- lib/galaxy/security/__init__.py | 17 ++++++++--------- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 52cc6846b354..e204da353c51 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -3,6 +3,7 @@ cron/ lib/galaxy/actions/ lib/galaxy/auth/ lib/galaxy/config.py +lib/galaxy/dataset_collections/ lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/dependencies/ lib/galaxy/eggs/ @@ -14,6 +15,7 @@ lib/galaxy/objectstore/ lib/galaxy/openid/ lib/galaxy/quota/ lib/galaxy/sample_tracking/ +lib/galaxy/security/ lib/galaxy/tags/ lib/galaxy/tools/cwl/ lib/galaxy/tools/parser/ diff --git a/lib/galaxy/dataset_collections/structure.py b/lib/galaxy/dataset_collections/structure.py index 8f1eab5a020c..d2e7cc3ed258 100644 --- a/lib/galaxy/dataset_collections/structure.py +++ b/lib/galaxy/dataset_collections/structure.py @@ -119,12 +119,12 @@ def multiply( self, other_structure ): return Tree( new_children, new_collection_type ) def clone( self ): - cloned_children = map( lambda (identifier, element): (identifier, element.clone()), self.children ) + cloned_children = [(_[0], _[1].clone()) for _ in self.children] return Tree( cloned_children, self.collection_type_description ) def dict_map( func, input_dict ): - return dict( [ ( k, func(v) ) for k, v in input_dict.iteritems() ] ) + return dict( [ ( k, func(v) ) for k, v in input_dict.items() ] ) def get_structure( dataset_collection_instance, collection_type_description, leaf_subcollection_type=None ): diff --git a/lib/galaxy/security/__init__.py b/lib/galaxy/security/__init__.py index 542fd040ee1b..94d105371f3f 100644 --- a/lib/galaxy/security/__init__.py +++ b/lib/galaxy/security/__init__.py @@ -3,7 +3,6 @@ """ import logging -import operator import socket from datetime import datetime, 
timedelta @@ -45,7 +44,7 @@ def get_action( self, name, default=None ): def get_actions( self ): """Get all permitted actions as a list of Action objects""" - return self.permitted_actions.__dict__.values() + return list(self.permitted_actions.__dict__.values()) def get_item_actions( self, action, item ): raise Exception( 'No valid method of retrieving action (%s) for item %s.' % ( action, item ) ) @@ -148,7 +147,7 @@ def convert_permitted_action_strings( self, permitted_action_strings ): When getting permitted actions from an untrusted source like a form, ensure that they match our actual permitted actions. """ - return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] ) + return [_ for _ in [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] if _ is not None] class GalaxyRBACAgent( RBACAgent ): @@ -180,9 +179,9 @@ def sort_by_attr( self, seq, attr ): # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not # only to provide stable sorting, but mainly to eliminate comparison of objects # (which can be expensive or prohibited) in case of equal attribute values. - intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq ) + intermed = map( None, [getattr(_, attr) for _ in seq], range( len( seq ) ), seq) intermed.sort() - return map( operator.getitem, intermed, ( -1, ) * len( intermed ) ) + return [_[-1] for _ in intermed] def _get_npns_roles( self, trans ): """ @@ -718,10 +717,10 @@ def guess_derived_permissions_for_datasets( self, datasets=[] ): else: if action.model == 'grant': # intersect existing roles with new roles - perms[ action ] = filter( lambda x: x in perms[ action ], roles ) + perms[ action ] = [_ for _ in roles if _ in perms[ action ]] elif action.model == 'restrict': # join existing roles with new roles - perms[ action ].extend( filter( lambda x: x not in perms[ action ], roles ) ) + perms[ action ].extend( [_ for _ in roles if _ not in perms[ action ]] ) return perms def associate_components( self, **kwd ): @@ -798,7 +797,7 @@ def user_set_default_permissions( self, user, permissions={}, history=False, dat permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS: [ self.get_private_user_role( user, auto_create=True ) ] } # new_user_dataset_access_role_default_private is set as True in config file if default_access_private: - permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ] + permissions[ self.permitted_actions.DATASET_ACCESS ] = next(iter(permissions.values())) # Delete all of the current default permissions for the user for dup in user.default_permissions: self.sa_session.delete( dup ) @@ -987,7 +986,7 @@ def privately_share_dataset( self, dataset, users=[] ): sharing_role = None if intersect: for role in intersect: - if not filter( lambda x: x not in users, [ ura.user for ura in role.users ] ): + if not [_ for _ in [ ura.user for ura in role.users ] if _ not in users]: # only use a role if it contains ONLY the users we're sharing with sharing_role = role break From 1fadc336a0bb228c9b96f3826e44fec327261740 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 19:30:17 -0400 Subject: [PATCH 22/86] Python3: lib/galaxy/datatypes/binary.py --- .ci/py3_sources.txt | 1 + lib/galaxy/datatypes/binary.py | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 
e204da353c51..f6744cb13137 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -4,6 +4,7 @@ lib/galaxy/actions/ lib/galaxy/auth/ lib/galaxy/config.py lib/galaxy/dataset_collections/ +lib/galaxy/datatypes/binary.py lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/dependencies/ lib/galaxy/eggs/ diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py index 4b4e2857646f..a7b3a3927e5b 100644 --- a/lib/galaxy/datatypes/binary.py +++ b/lib/galaxy/datatypes/binary.py @@ -1,4 +1,5 @@ """Binary classes""" +from __future__ import print_function import binascii import gzip @@ -234,7 +235,7 @@ def merge(split_files, output_file): shutil.rmtree(tmp_dir) # clean up raise Exception( "Error merging BAM files: %s" % stderr ) else: - print stderr + print(stderr) os.unlink(stderr_name) os.rmdir(tmp_dir) @@ -322,7 +323,7 @@ def groom_dataset_content( self, file_name ): shutil.rmtree( tmp_dir) # clean up raise Exception( "Error Grooming BAM file contents: %s" % stderr ) else: - print stderr + print(stderr) # Move samtools_created_sorted_file_name to our output dataset location shutil.move( samtools_created_sorted_file_name, file_name ) # Remove temp file and empty temporary directory @@ -365,7 +366,7 @@ def set_meta( self, dataset, overwrite=True, **kwd ): os.unlink( stderr_name ) # clean up raise Exception( "Error Setting BAM Metadata: %s" % stderr ) else: - print stderr + print(stderr) dataset.metadata.bam_index = index_file # Remove temp file os.unlink( stderr_name ) @@ -932,7 +933,7 @@ def sniff( self, filename ): c = conn.cursor() tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" result = c.execute( tables_query ).fetchall() - result = map( lambda x: x[0], result ) + result = [_[0] for _ in result] for table_name in gemini_table_names: if table_name not in result: return False @@ -971,7 +972,7 @@ def sniff( self, filename ): c = conn.cursor() tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" result = c.execute( tables_query ).fetchall() - result = map( lambda x: x[0], result ) + result = [_[0] for _ in result] for table_name in mz_table_names: if table_name not in result: return False @@ -1006,7 +1007,7 @@ def sniff( self, filename ): c = conn.cursor() tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" result = c.execute( tables_query ).fetchall() - result = map( lambda x: x[0], result ) + result = [_[0] for _ in result] for table_name in mz_table_names: if table_name not in result: return False From c9ad6e31ba5a29d1c7d23842ea23fb6d222738c1 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 19:43:36 -0400 Subject: [PATCH 23/86] Python3: lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py --- .ci/py3_sources.txt | 1 + .../datatypes/converters/interval_to_bedstrict_converter.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index f6744cb13137..4f0176daee24 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -5,6 +5,7 @@ lib/galaxy/auth/ lib/galaxy/config.py lib/galaxy/dataset_collections/ lib/galaxy/datatypes/binary.py +lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/dependencies/ lib/galaxy/eggs/ diff --git a/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py index 87ef467807f3..64a61825c92a 
100644 --- a/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py +++ b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py @@ -138,7 +138,7 @@ def __main__(): except: name = "region_%i" % count try: - fields = map( str, [ region.chrom, region.start, region.end, name, 0, region.strand ] ) + fields = [str(item) for item in (region.chrom, region.start, region.end, name, 0, region.strand)] if force_num_columns is not None and len( fields ) != force_num_columns: fields = force_bed_field_count( fields, count, force_num_columns ) out.write( "%s\n" % '\t'.join( fields ) ) @@ -147,9 +147,9 @@ def __main__(): if first_skipped_line is None: first_skipped_line = count + 1 out.close() - print "%i regions converted to BED." % ( count + 1 - skipped_lines ) + print("%i regions converted to BED." % ( count + 1 - skipped_lines )) if skipped_lines > 0: - print "Skipped %d blank or invalid lines starting with line # %d." % ( skipped_lines, first_skipped_line ) + print("Skipped %d blank or invalid lines starting with line # %d." % ( skipped_lines, first_skipped_line )) if __name__ == "__main__": __main__() From 578ed609b7f836733c7225e5ee44a09536a39160 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 19:56:39 -0400 Subject: [PATCH 24/86] Python3: lib/galaxy/datatypes/converters/interval_to_bed_converter.py --- lib/galaxy/datatypes/converters/interval_to_bed_converter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/datatypes/converters/interval_to_bed_converter.py b/lib/galaxy/datatypes/converters/interval_to_bed_converter.py index 6942449d3b2e..9e9022bd1be0 100644 --- a/lib/galaxy/datatypes/converters/interval_to_bed_converter.py +++ b/lib/galaxy/datatypes/converters/interval_to_bed_converter.py @@ -55,9 +55,9 @@ def __main__(): if not first_skipped_line: first_skipped_line = count + 1 out.close() - print "%i regions converted to BED." % ( count + 1 - skipped_lines ) + print("%i regions converted to BED." % ( count + 1 - skipped_lines )) if skipped_lines > 0: - print "Skipped %d blank or invalid lines starting with line # %d." % ( skipped_lines, first_skipped_line ) + print("Skipped %d blank or invalid lines starting with line # %d." 
% ( skipped_lines, first_skipped_line )) if __name__ == "__main__": __main__() From 4b9f433f2d9b63b04477063e7321a8ac19fc370c Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 19:58:19 -0400 Subject: [PATCH 25/86] Forgot py3_sources.txt for prev commit --- .ci/py3_sources.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 4f0176daee24..8bdfe8f39120 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -5,6 +5,7 @@ lib/galaxy/auth/ lib/galaxy/config.py lib/galaxy/dataset_collections/ lib/galaxy/datatypes/binary.py +lib/galaxy/datatypes/converters/interval_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/dependencies/ From a04f6589eae995352e5bbcc9c0a73481a3abe8a8 Mon Sep 17 00:00:00 2001 From: Junzhou Wang Date: Sat, 25 Jun 2016 19:52:38 -0400 Subject: [PATCH 26/86] Python3: test/shed_functional/ and test/unit/ --- .ci/py3_sources.txt | 5 ++++ scripts/api/data_manager_example_execute.py | 19 ++++++++------- test/shed_functional/base/twilltestcase.py | 22 ++++++++--------- test/shed_functional/functional_tests.py | 6 ++--- .../dataproviders/test_base_dataproviders.py | 24 +++++++++---------- .../dataproviders/test_line_dataproviders.py | 7 +++--- 6 files changed, 44 insertions(+), 39 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 8bdfe8f39120..42e9bdc06e7e 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -47,6 +47,7 @@ lib/tool_shed/tools/ lib/tool_shed/util/ lib/tool_shed/utility_containers/ scripts/api/common.py +scripts/api/data_manager_example_execute.py scripts/api/display.py scripts/api/workflow_execute_parameters.py scripts/auth/ @@ -74,3 +75,7 @@ test/api/test_datasets.py test/api/test_tool_data.py test/api/test_workflow_extraction.py test/api/test_workflows.py +test/shed_functional/base/twilltestcase.py +test/shed_functional/functional_tests.py +test/unit/datatypes/dataproviders/test_base_dataproviders.py +test/unit/datatypes/dataproviders/test_line_dataproviders.py diff --git a/scripts/api/data_manager_example_execute.py b/scripts/api/data_manager_example_execute.py index c478399703b7..5e1c84bdf436 100644 --- a/scripts/api/data_manager_example_execute.py +++ b/scripts/api/data_manager_example_execute.py @@ -3,9 +3,10 @@ # Very simple example of using the API to run Data Managers # Script makes the naive assumption that dbkey==sequence id, which in many cases is not true nor desired # *** This script is not recommended for use as-is on a production server *** +from __future__ import print_function import optparse import time -import urlparse +from six.moves.urllib.parse import urljoin from common import get, post @@ -17,7 +18,7 @@ def run_tool( tool_id, history_id, params, api_key, galaxy_url, wait=True, sleep_time=None, **kwargs ): sleep_time = sleep_time or DEFAULT_SLEEP_TIME - tools_url = urlparse.urljoin( galaxy_url, 'api/tools' ) + tools_url = urljoin( galaxy_url, 'api/tools' ) payload = { 'tool_id': tool_id, } @@ -41,7 +42,7 @@ def run_tool( tool_id, history_id, params, api_key, galaxy_url, wait=True, sleep def get_dataset_state( hda_id, api_key, galaxy_url ): - datasets_url = urlparse.urljoin( galaxy_url, 'api/datasets/%s' % hda_id ) + datasets_url = urljoin( galaxy_url, 'api/datasets/%s' % hda_id ) dataset_info = get( api_key, datasets_url ) return dataset_info['state'] @@ -63,9 +64,9 @@ def dataset_is_terminal( hda_id, api_key, galaxy_url ): assert 
options.dbkeys, ValueError( 'You must specify at least one dbkey to use.' ) # check user is admin - configuration_options = get( options.api_key, urlparse.urljoin( options.base_url, 'api/configuration' ) ) + configuration_options = get( options.api_key, urljoin( options.base_url, 'api/configuration' ) ) if 'library_import_dir' not in configuration_options: # hack to check if is admin user - print "Warning: Data Managers are only available to admin users. The API Key provided does not appear to belong to an admin user. Will attempt to run anyway." + print("Warning: Data Managers are only available to admin users. The API Key provided does not appear to belong to an admin user. Will attempt to run anyway.") # Fetch Genomes dbkeys = {} @@ -75,7 +76,7 @@ def dataset_is_terminal( hda_id, api_key, galaxy_url ): else: "dbkey (%s) was specified more than once, skipping additional specification." % ( dbkey ) - print 'Genomes Queued for downloading.' + print('Genomes Queued for downloading.') # Start indexers indexing_tools = [] @@ -88,16 +89,16 @@ def dataset_is_terminal( hda_id, api_key, galaxy_url ): if dbkeys: time.sleep( options.sleep_time ) - print 'All genomes downloaded and indexers now queued.' + print('All genomes downloaded and indexers now queued.') # Wait for indexers to finish while indexing_tools: for i, indexing_tool_value in enumerate( indexing_tools ): if dataset_is_terminal( indexing_tool_value['outputs'][0]['id'], options.api_key, options.base_url ): - print 'Finished:', indexing_tool_value + print('Finished:', indexing_tool_value) del indexing_tools[i] break if indexing_tools: time.sleep( options.sleep_time ) - print 'All indexers have been run, please check results.' + print('All indexers have been run, please check results.') diff --git a/test/shed_functional/base/twilltestcase.py b/test/shed_functional/base/twilltestcase.py index a8765c91512e..83598a69f719 100644 --- a/test/shed_functional/base/twilltestcase.py +++ b/test/shed_functional/base/twilltestcase.py @@ -6,7 +6,7 @@ import tarfile import tempfile import time -import urllib +from six.moves.urllib.parse import urlencode, quote_plus from json import loads import twill.commands as tc @@ -412,7 +412,7 @@ def display_installed_manage_data_manager_page( self, installed_repository, data for data_manager_name in data_manager_names: assert data_manager_name in data_managers, "The requested Data Manager '%s' was not found in repository metadata." % data_manager_name else: - data_manager_name = data_managers.keys() + data_manager_name = list(data_managers.keys()) for data_manager_name in data_manager_names: url = '/data_manager/manage_data_manager?id=%s' % data_managers[data_manager_name]['guid'] self.visit_galaxy_url( url ) @@ -427,7 +427,7 @@ def display_installed_repository_manage_page( self, installed_repository, string self.visit_galaxy_url( url ) strings_displayed.append( str( installed_repository.installed_changeset_revision ) ) # Every place Galaxy's XXXX tool appears in attribute - need to quote. 
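The change that follows, like several above (missing_tool_steps, convert_permitted_action_strings), rewrites map( lambda x: ..., seq ) as a list comprehension, which returns a real list on both Python 2 and 3 and drops the lambda entirely. A minimal sketch of the pattern (illustrative only, not part of the patch):

# Python 2: map() returns a list; Python 3: a lazy iterator.
strings = ["it's", "Galaxy's"]
quoted = map(lambda x: x.replace("'", "&#39;"), strings)   # a list only on Py2
quoted = [x.replace("'", "&#39;") for x in strings]        # a list on both
assert quoted == ["it&#39;s", "Galaxy&#39;s"]

The same reasoning applies to the six.moves.urllib.parse imports just above: urljoin, urlencode and quote_plus moved into urllib.parse in Python 3, and the six shim resolves one import line correctly on both interpreters.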
- strings_displayed = map( lambda x: x.replace("'", "'"), strings_displayed ) + strings_displayed = [x.replace("'", "'") for x in strings_displayed] self.check_for_strings( strings_displayed, strings_not_displayed ) def display_installed_workflow_image( self, repository, workflow_name, strings_displayed=None, strings_not_displayed=None ): @@ -818,7 +818,7 @@ def get_tools_from_repository_metadata( self, repository, include_invalid=False def get_tool_panel_section_from_api( self, metadata ): tool_metadata = metadata[ 'tools' ] - tool_guid = urllib.quote_plus( tool_metadata[ 0 ][ 'guid' ], safe='' ) + tool_guid = quote_plus( tool_metadata[ 0 ][ 'guid' ], safe='' ) api_url = '/%s' % '/'.join( [ 'api', 'tools', tool_guid ] ) self.visit_galaxy_url( api_url ) tool_dict = loads( self.last_page() ) @@ -1020,12 +1020,12 @@ def load_changeset_in_tool_shed( self, repository_id, changeset_revision, string self.check_for_strings( strings_displayed, strings_not_displayed ) def load_checkable_revisions( self, strings_displayed=None, strings_not_displayed=None ): - params = urllib.urlencode( dict( do_not_test='false', - downloadable='true', - includes_tools='true', - malicious='false', - missing_test_components='false', - skip_tool_test='false' ) ) + params = urlencode( dict( do_not_test='false', + downloadable='true', + includes_tools='true', + malicious='false', + missing_test_components='false', + skip_tool_test='false' ) ) api_url = '%s?%s' % ( '/'.join( [ self.url, 'api', 'repository_revisions' ] ), params ) self.visit_url( api_url ) self.check_for_strings( strings_displayed, strings_not_displayed ) @@ -1215,7 +1215,7 @@ def set_form_value( self, form, kwd, field_name, field_value ): kwd[ field_name ] = str( field_value ) else: if field_name in kwd: - log.debug( 'No field %s in form %s, discarding from return value.' % ( str( control ), str( form_id ) ) ) + log.debug( 'No field %s in form %s, discarding from return value.' % ( str( controls ), str( form_id ) ) ) del( kwd[ field_name ] ) return kwd diff --git a/test/shed_functional/functional_tests.py b/test/shed_functional/functional_tests.py index c5f8b2a541f5..8a2511f4bd1c 100644 --- a/test/shed_functional/functional_tests.py +++ b/test/shed_functional/functional_tests.py @@ -3,7 +3,7 @@ Launch this script by running ``run_tests.sh -t`` from GALAXY_ROOT. 
""" -from __future__ import absolute_import +from __future__ import absolute_import, print_function import os import string @@ -76,7 +76,7 @@ def setup(self): galaxy_migrated_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) hgweb_config_dir = hgweb_config_file_path os.environ[ 'TEST_HG_WEB_CONFIG_DIR' ] = hgweb_config_dir - print "Directory location for hgweb.config:", hgweb_config_dir + print("Directory location for hgweb.config:", hgweb_config_dir) toolshed_database_conf = driver_util.database_conf(shed_db_path, prefix="TOOL_SHED") kwargs = dict( admin_users='test@bx.psu.edu', allow_user_creation=True, @@ -152,7 +152,7 @@ def setup(self): update_integrated_tool_panel=True, ) ) - print "Galaxy database connection:", kwargs["database_connection"] + print("Galaxy database connection:", kwargs["database_connection"]) # ---- Run galaxy webserver ------------------------------------------------------ galaxyapp = driver_util.build_galaxy_app(kwargs) diff --git a/test/unit/datatypes/dataproviders/test_base_dataproviders.py b/test/unit/datatypes/dataproviders/test_base_dataproviders.py index b1a949a81fd9..e4c1025339e8 100644 --- a/test/unit/datatypes/dataproviders/test_base_dataproviders.py +++ b/test/unit/datatypes/dataproviders/test_base_dataproviders.py @@ -6,7 +6,7 @@ import os.path import imp import unittest -import StringIO +from six import StringIO import logging log = logging.getLogger( __name__ ) @@ -80,23 +80,23 @@ def contents_provider_and_data( self, return ( contents, provider, data ) def test_iterators( self ): - source = ( str( x ) for x in xrange( 1, 10 ) ) + source = ( str( x ) for x in range( 1, 10 ) ) provider = self.provider_class( source ) data = list( provider ) log.debug( 'data: %s', str( data ) ) - self.assertEqual( data, [ str( x ) for x in xrange( 1, 10 ) ] ) + self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] ) - source = ( str( x ) for x in xrange( 1, 10 ) ) + source = ( str( x ) for x in range( 1, 10 ) ) provider = self.provider_class( source ) data = list( provider ) log.debug( 'data: %s', str( data ) ) - self.assertEqual( data, [ str( x ) for x in xrange( 1, 10 ) ] ) + self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] ) - source = ( str( x ) for x in xrange( 1, 10 ) ) + source = ( str( x ) for x in range( 1, 10 ) ) provider = self.provider_class( source ) data = list( provider ) log.debug( 'data: %s', str( data ) ) - self.assertEqual( data, [ str( x ) for x in xrange( 1, 10 ) ] ) + self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] ) def test_validate_source( self ): """validate_source should throw an error if the source doesn't have attr '__iter__' @@ -111,7 +111,7 @@ def non_iterator_dprov( source ): def test_writemethods( self ): """should throw an error if any write methods are called """ - source = ( str( x ) for x in xrange( 1, 10 ) ) + source = ( str( x ) for x in range( 1, 10 ) ) provider = self.provider_class( source ) # should throw error @@ -126,11 +126,11 @@ def call_method( provider, method_name, *args ): def test_readlines( self ): """readlines should return all the data in list form """ - source = ( str( x ) for x in xrange( 1, 10 ) ) + source = ( str( x ) for x in range( 1, 10 ) ) provider = self.provider_class( source ) data = provider.readlines() log.debug( 'data: %s', str( data ) ) - self.assertEqual( data, [ str( x ) for x in xrange( 1, 10 ) ] ) + self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] ) def test_stringio( self ): """should work with StringIO @@ -140,7 +140,7 @@ def test_stringio( 
self ): Two Three """ ) - source = StringIO.StringIO( contents ) + source = StringIO( contents ) provider = self.provider_class( source ) data = list( provider ) log.debug( 'data: %s', str( data ) ) @@ -154,7 +154,7 @@ def test_file( self ): ( contents, provider, data ) = self.contents_provider_and_data() self.assertEqual( data, self.parses_default_content_as() ) # provider should call close on file - self.assertTrue( isinstance( provider.source, file ) ) + self.assertTrue( hasattr(provider.source, 'read')) self.assertTrue( provider.source.closed ) diff --git a/test/unit/datatypes/dataproviders/test_line_dataproviders.py b/test/unit/datatypes/dataproviders/test_line_dataproviders.py index d060cc68c792..56767e787ca3 100644 --- a/test/unit/datatypes/dataproviders/test_line_dataproviders.py +++ b/test/unit/datatypes/dataproviders/test_line_dataproviders.py @@ -4,16 +4,15 @@ """ import imp -import os import unittest - +import os.path import logging log = logging.getLogger( __name__ ) test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), '../../unittest_utils/utility.py' ) ) -import test_base_dataproviders +from . import test_base_dataproviders from galaxy.datatypes.dataproviders import line @@ -178,7 +177,7 @@ def test_file( self ): ( contents, provider, data ) = self.contents_provider_and_data() self.assertEqual( data, self.parses_default_content_as() ) self.assertTrue( isinstance( provider.source, line.FilteredLineDataProvider ) ) - self.assertTrue( isinstance( provider.source.source, file ) ) + self.assertTrue( hasattr(provider.source.source, 'read' ) ) # provider should call close on file self.assertTrue( provider.source.source.closed ) From 44d895ae500b1d20ec615aa839cfd2f56efc6d30 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 20:07:42 -0400 Subject: [PATCH 27/86] Python3: lib/galaxy/datatypes/sequence.py --- .ci/py3_sources.txt | 1 + lib/galaxy/datatypes/sequence.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 42e9bdc06e7e..c50ad21206bb 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -8,6 +8,7 @@ lib/galaxy/datatypes/binary.py lib/galaxy/datatypes/converters/interval_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py +lib/galaxy/datatypes/sequence.py lib/galaxy/dependencies/ lib/galaxy/eggs/ lib/galaxy/exceptions/ diff --git a/lib/galaxy/datatypes/sequence.py b/lib/galaxy/datatypes/sequence.py index 8762b2524ae6..1b4619d731e9 100644 --- a/lib/galaxy/datatypes/sequence.py +++ b/lib/galaxy/datatypes/sequence.py @@ -10,6 +10,8 @@ import string from cgi import escape +from six import PY3 + from galaxy import util from galaxy.datatypes import metadata from galaxy.util.checkers import is_gzip @@ -21,6 +23,8 @@ import bx.align.maf +if PY3: + long = int log = logging.getLogger(__name__) From f01c98bdccb8f4215f3beb525a7dc5e43bc061b4 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 20:18:41 -0400 Subject: [PATCH 28/86] Python3: more lib/galaxy/datatypes/converters/ lib/galaxy/datatypes/converters/fastq_to_fqtoc.py lib/galaxy/datatypes/converters/gff_to_bed_converter.py --- .ci/py3_sources.txt | 2 ++ lib/galaxy/datatypes/converters/fastq_to_fqtoc.py | 2 +- lib/galaxy/datatypes/converters/gff_to_bed_converter.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index c50ad21206bb..a16baacd2606 100644 --- 
a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -5,6 +5,8 @@ lib/galaxy/auth/ lib/galaxy/config.py lib/galaxy/dataset_collections/ lib/galaxy/datatypes/binary.py +lib/galaxy/datatypes/converters/fastq_to_fqtoc.py +lib/galaxy/datatypes/converters/gff_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py diff --git a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py index bb9f0b488a9c..4f5d9e525f60 100644 --- a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py +++ b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py @@ -18,7 +18,7 @@ def main(): """ input_fname = sys.argv[1] if is_gzip(input_fname): - print 'Conversion is only possible for uncompressed files' + print('Conversion is only possible for uncompressed files') sys.exit(1) out_file = open(sys.argv[2], 'w') diff --git a/lib/galaxy/datatypes/converters/gff_to_bed_converter.py b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py index eaba23014cc2..d2ea88c0ef6b 100644 --- a/lib/galaxy/datatypes/converters/gff_to_bed_converter.py +++ b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py @@ -38,7 +38,7 @@ def __main__(): info_msg = "%i lines converted to BED. " % ( i + 1 - skipped_lines ) if skipped_lines > 0: info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line ) - print info_msg + print(info_msg) if __name__ == "__main__": __main__() From dc37a5f19c456376a8f9d996a5a73fbd1c42e212 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 20:22:06 -0400 Subject: [PATCH 29/86] flake8 2.6.2 released --- tox.ini | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index a0a82bbb322e..17abc6c9fb26 100644 --- a/tox.ini +++ b/tox.ini @@ -6,22 +6,22 @@ skipsdist = True [testenv:py27-lint] commands = bash .ci/flake8_wrapper.sh whitelist_externals = bash -deps = flake8==2.6.0 +deps = flake8 [testenv:py33-lint] commands = bash .ci/flake8_py3_wrapper.sh whitelist_externals = bash -deps = flake8==2.6.0 +deps = flake8 [testenv:py34-lint] commands = bash .ci/flake8_py3_wrapper.sh whitelist_externals = bash -deps = flake8==2.6.0 +deps = flake8 [testenv:py35-lint] commands = bash .ci/flake8_py3_wrapper.sh whitelist_externals = bash -deps = flake8==2.6.0 +deps = flake8 [testenv:py27-unit] commands = bash run_tests.sh --no-create-venv -u From 6ea74e2c1f272dc7c71e480a98b6c125a6fafde6 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 20:27:13 -0400 Subject: [PATCH 30/86] Python3: lib/galaxy/datatypes/sniff.py --- .ci/py3_sources.txt | 1 + lib/galaxy/datatypes/sniff.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index a16baacd2606..b51e3d650665 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -11,6 +11,7 @@ lib/galaxy/datatypes/converters/interval_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/datatypes/sequence.py +lib/galaxy/datatypes/sniff.py lib/galaxy/dependencies/ lib/galaxy/eggs/ lib/galaxy/exceptions/ diff --git a/lib/galaxy/datatypes/sniff.py b/lib/galaxy/datatypes/sniff.py index 24ab0780a011..609a46c6419f 100644 --- a/lib/galaxy/datatypes/sniff.py +++ b/lib/galaxy/datatypes/sniff.py @@ -198,7 +198,7 @@ def get_headers( fname, 
sep, count=60, is_multi_byte=False ): [['chr7', '127475281', '127491632', 'NM_000230', '0', '+', '127486022', '127488767', '0', '3', '29,172,3225,', '0,10713,13126,'], ['chr7', '127486011', '127488900', 'D49487', '0', '+', '127486022', '127488767', '0', '2', '155,490,', '0,2399']] """ headers = [] - for idx, line in enumerate(file(fname)): + for idx, line in enumerate(open(fname)): line = line.rstrip('\n\r') if is_multi_byte: # TODO: fix this - sep is never found in line @@ -470,7 +470,7 @@ def handle_uploaded_dataset_file( filename, datatypes_registry, ext='auto', is_m COMPRESSION_CHECK_FUNCTIONS = [ ( 'gzip', is_gzip ) ] COMPRESSION_DATATYPES = dict( gzip=[ 'bam' ] ) COMPRESSED_EXTENSIONS = [] -for exts in COMPRESSION_DATATYPES.itervalues(): +for exts in COMPRESSION_DATATYPES.values(): COMPRESSED_EXTENSIONS.extend( exts ) From 9fd77057339260e3c0c2cba802510e93db761c6f Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 20:31:32 -0400 Subject: [PATCH 31/86] Python3: lib/galaxy/datatypes/converters/bed_to_gff_converter.py --- .ci/py3_sources.txt | 1 + lib/galaxy/datatypes/converters/bed_to_gff_converter.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index b51e3d650665..8ea6575c4b07 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -5,6 +5,7 @@ lib/galaxy/auth/ lib/galaxy/config.py lib/galaxy/dataset_collections/ lib/galaxy/datatypes/binary.py +lib/galaxy/datatypes/converters/bed_to_gff_converter.py lib/galaxy/datatypes/converters/fastq_to_fqtoc.py lib/galaxy/datatypes/converters/gff_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bed_converter.py diff --git a/lib/galaxy/datatypes/converters/bed_to_gff_converter.py b/lib/galaxy/datatypes/converters/bed_to_gff_converter.py index 8d6795b3e7ac..c67f798a1323 100644 --- a/lib/galaxy/datatypes/converters/bed_to_gff_converter.py +++ b/lib/galaxy/datatypes/converters/bed_to_gff_converter.py @@ -69,7 +69,7 @@ def __main__(): info_msg = "%i lines converted to GFF version 2. " % ( i + 1 - skipped_lines ) if skipped_lines > 0: info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line ) - print info_msg + print(info_msg) if __name__ == "__main__": __main__() From 3f4f7af2f147ef534372e903b68e1877e86fc8cb Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 20:35:49 -0400 Subject: [PATCH 32/86] Python3: lib/galaxy/datatypes/tabular.py --- .ci/py3_sources.txt | 1 + lib/galaxy/datatypes/tabular.py | 19 ++++++++++++------- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 8ea6575c4b07..2ab492579187 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -13,6 +13,7 @@ lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py lib/galaxy/datatypes/sequence.py lib/galaxy/datatypes/sniff.py +lib/galaxy/datatypes/tabular.py lib/galaxy/dependencies/ lib/galaxy/eggs/ lib/galaxy/exceptions/ diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py index e2f021fa2737..b89dcc9133d9 100644 --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -13,6 +13,8 @@ from cgi import escape from json import dumps +from six import PY3 + from galaxy import util from galaxy.datatypes import data, metadata from galaxy.datatypes.metadata import MetadataElement @@ -21,6 +23,9 @@ from . 
import dataproviders +if PY3: + long = int + log = logging.getLogger(__name__) @@ -853,10 +858,10 @@ def set_meta( self, dataset, overwrite=True, skip=None, max_data_lines=5, **kwd dataset.metadata.comment_lines = 0 dataset.metadata.columns = 21 dataset.metadata.column_types = ['str', 'int', 'int', 'int', 'int', 'int', 'str', 'int', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str'] - dataset.metadata.lanes = lanes.keys() + dataset.metadata.lanes = list(lanes.keys()) dataset.metadata.tiles = ["%04d" % int(t) for t in tiles.keys()] - dataset.metadata.barcodes = filter(lambda x: x != '0', barcodes.keys()) + ['NoIndex' for x in barcodes.keys() if x == '0'] - dataset.metadata.reads = reads.keys() + dataset.metadata.barcodes = [_ for _ in barcodes.keys() if _ != '0'] + ['NoIndex' for _ in barcodes.keys() if _ == '0'] + dataset.metadata.reads = list(reads.keys()) class ElandMulti( Tabular ): @@ -919,7 +924,7 @@ def sniff( self, filename ): # check the dialect works reader = csv.reader(open(filename, 'r'), self.dialect) # Check we can read header and get columns - header_row = reader.next() + header_row = next(reader) if len(header_row) < 2: # No columns so not separated by this dialect. return False @@ -937,7 +942,7 @@ def sniff( self, filename ): if not found_second_line: return False else: - data_row = reader.next() + data_row = next(reader) if len(data_row) < 2: # No columns so not separated by this dialect. return False @@ -975,8 +980,8 @@ def set_meta( self, dataset, **kwd ): data_row = None header_row = None try: - header_row = reader.next() - data_row = reader.next() + header_row = next(reader) + data_row = next(reader) for row in reader: pass except csv.Error as e: From e45f8abb54a3008e11eb22a478c0521d39626629 Mon Sep 17 00:00:00 2001 From: Junzhou Wang Date: Sat, 25 Jun 2016 20:44:44 -0400 Subject: [PATCH 33/86] Python3:script/api/ --- .ci/py3_sources.txt | 22 +++++++++++++ scripts/api/example_watch_folder.py | 11 ++++--- scripts/api/form_create_from_xml.py | 3 +- scripts/api/history_create_history.py | 3 +- scripts/api/history_delete_history.py | 3 +- .../api/import_library_dataset_to_history.py | 5 +-- ...ows_from_installed_tool_shed_repository.py | 3 +- scripts/api/library_create_folder.py | 5 +-- scripts/api/library_create_library.py | 3 +- scripts/api/library_upload_from_import_dir.py | 3 +- scripts/api/load_data_with_metadata.py | 9 ++--- scripts/api/repair_tool_shed_repository.py | 3 +- scripts/api/request_type_create_from_xml.py | 3 +- scripts/api/requests_update_state.py | 3 +- scripts/api/sample_dataset_update_status.py | 5 +-- scripts/api/sample_update_state.py | 3 +- scripts/api/search.py | 33 ++++++++++--------- scripts/api/sequencer_configuration_create.py | 3 +- scripts/api/upload_to_history.py | 15 +++++---- scripts/api/workflow_delete.py | 3 +- scripts/api/workflow_execute.py | 3 +- scripts/api/workflow_import.py | 3 +- .../api/workflow_import_from_file_rpark.py | 3 +- 23 files changed, 97 insertions(+), 53 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 2ab492579187..a828fdbca07a 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -53,6 +53,28 @@ lib/tool_shed/tools/ lib/tool_shed/util/ lib/tool_shed/utility_containers/ scripts/api/common.py +scripts/api/example_watch_folder.py +scripts/api/form_create_from_xml.py +scripts/api/history_delete_history.py +scripts/api/import_library_dataset_to_history.py +scripts/api/import_workflows_from_installed_tool_shed_repository.py 
+scripts/api/library_create_folder.py +scripts/api/library_create_library.py +scripts/api/library_upload_from_import_dir.py +scripts/api/load_data_with_metadata.py +scripts/api/repair_tool_shed_repository.py +scripts/api/requests_update_state.py +scripts/api/request_type_create_from_xml.py +scripts/api/sample_dataset_update_status.py +scripts/api/sample_update_state.py +scripts/api/search.py +scripts/api/sequencer_configuration_create.py +scripts/api/upload_to_history.py +scripts/api/workflow_delete.py +scripts/api/workflow_execute.py +scripts/api/workflow_import_from_file_rpark.py +scripts/api/workflow_import.py + scripts/api/data_manager_example_execute.py scripts/api/display.py scripts/api/workflow_execute_parameters.py diff --git a/scripts/api/example_watch_folder.py b/scripts/api/example_watch_folder.py index 83cf471a8442..86eaa486f2f5 100755 --- a/scripts/api/example_watch_folder.py +++ b/scripts/api/example_watch_folder.py @@ -10,6 +10,7 @@ NOTE: The upload method used requires the data library filesystem upload allow_library_path_paste """ +from __future__ import print_function import os import shutil import sys @@ -35,10 +36,10 @@ def main(api_key, api_url, in_folder, out_folder, data_library, workflow): library_folder_id = f['id'] workflow = display(api_key, api_url + 'workflows/%s' % workflow, return_formatted=False) if not workflow: - print "Workflow %s not found, terminating." + print("Workflow %s not found, terminating.") sys.exit(1) if not library_id or not library_folder_id: - print "Failure to configure library destination." + print("Failure to configure library destination.") sys.exit(1) while 1: # Watch in_folder, upload anything that shows up there to data library and get ldda, @@ -65,11 +66,11 @@ def main(api_key, api_url, in_folder, out_folder, data_library, workflow): wf_data['workflow_id'] = workflow['id'] wf_data['history'] = "%s - %s" % (fname, workflow['name']) wf_data['ds_map'] = {} - for step_id, ds_in in workflow['inputs'].iteritems(): + for step_id, ds_in in workflow['inputs'].items(): wf_data['ds_map'][step_id] = {'src': 'ld', 'id': ds['id']} res = submit( api_key, api_url + 'workflows', wf_data, return_formatted=False) if res: - print res + print(res) # Successful workflow execution, safe to move dataset. 
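# (Illustrative aside, not part of the patch: every script touched in this
# patch gains "from __future__ import print_function" at the top. On Python 2
# that import makes print a function, so print(res) above means the same
# thing on both interpreters. Without it, Python 2 still accepts print(res),
# the parentheses read as a grouped expression, but multi-argument calls
# diverge:
#     print("a", "b")   # Py2 without the import: prints the tuple ('a', 'b')
#     print("a", "b")   # Py3, or Py2 with the import: prints: a b
# which is why the future import accompanies the mechanical rewrites.)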
shutil.move(fullpath, os.path.join(out_folder, fname)) time.sleep(10) @@ -83,6 +84,6 @@ def main(api_key, api_url, in_folder, out_folder, data_library, workflow): data_library = sys.argv[5] workflow = sys.argv[6] except IndexError: - print 'usage: %s key url in_folder out_folder data_library workflow' % os.path.basename( sys.argv[0] ) + print('usage: %s key url in_folder out_folder data_library workflow' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) main(api_key, api_url, in_folder, out_folder, data_library, workflow ) diff --git a/scripts/api/form_create_from_xml.py b/scripts/api/form_create_from_xml.py index babd138ca2fd..c395a82e0586 100644 --- a/scripts/api/form_create_from_xml.py +++ b/scripts/api/form_create_from_xml.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -8,7 +9,7 @@ data = {} data[ 'xml_text' ] = open( sys.argv[3] ).read() except IndexError: - print 'usage: %s key url form_xml_description_file' % os.path.basename( sys.argv[0] ) + print('usage: %s key url form_xml_description_file' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) diff --git a/scripts/api/history_create_history.py b/scripts/api/history_create_history.py index cc3b6ee88613..4f9be8006bdb 100644 --- a/scripts/api/history_create_history.py +++ b/scripts/api/history_create_history.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -7,7 +8,7 @@ try: assert sys.argv[2] except IndexError: - print 'usage: %s key url [name] ' % os.path.basename( sys.argv[0] ) + print('usage: %s key url [name] ' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data = {} diff --git a/scripts/api/history_delete_history.py b/scripts/api/history_delete_history.py index 0c68eeb33f85..be3d1179e96d 100644 --- a/scripts/api/history_delete_history.py +++ b/scripts/api/history_delete_history.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -7,7 +8,7 @@ try: assert sys.argv[2] except IndexError: - print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] ) + print('usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data = {} diff --git a/scripts/api/import_library_dataset_to_history.py b/scripts/api/import_library_dataset_to_history.py index 6984e15a6d18..7b5776be4a7d 100644 --- a/scripts/api/import_library_dataset_to_history.py +++ b/scripts/api/import_library_dataset_to_history.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -9,8 +10,8 @@ data = {} data['from_ld_id'] = sys.argv[3] except IndexError: - print 'usage: %s key url library_file_id' % os.path.basename( sys.argv[0] ) - print ' library_file_id is from /api/libraries//contents/' + print('usage: %s key url library_file_id' % os.path.basename( sys.argv[0] )) + print(' library_file_id is from /api/libraries//contents/') sys.exit( 1 ) submit( sys.argv[1], sys.argv[2], data ) diff --git a/scripts/api/import_workflows_from_installed_tool_shed_repository.py b/scripts/api/import_workflows_from_installed_tool_shed_repository.py index 867f1f318246..52b6fec21919 100644 --- a/scripts/api/import_workflows_from_installed_tool_shed_repository.py +++ b/scripts/api/import_workflows_from_installed_tool_shed_repository.py @@ -5,6 +5,7 @@ Here is a working example of how to use this script to repair a repository installed into Galaxy. 
python ./import_workflows_from_installed_tool_shed_repository.py -a 22be3b -l http://localhost:8763/ -n workflow_with_tools -o test -r ef45bb64237e -u http://localhost:9009/ """ +from __future__ import print_function import argparse from common import display, submit @@ -48,7 +49,7 @@ def main( options ): url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflows' % str( tool_shed_repository_id ) ) submit( options.api, url, data ) else: - print "Invalid tool_shed / name / owner / changeset_revision." + print("Invalid tool_shed / name / owner / changeset_revision.") if __name__ == '__main__': parser = argparse.ArgumentParser( description='Import workflows contained in an installed tool shed repository via the Galaxy API.' ) diff --git a/scripts/api/library_create_folder.py b/scripts/api/library_create_folder.py index e6c3db60a049..077a5e497163 100755 --- a/scripts/api/library_create_folder.py +++ b/scripts/api/library_create_folder.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -10,12 +11,12 @@ data[ 'name' ] = sys.argv[4] data[ 'create_type' ] = 'folder' except IndexError: - print 'usage: %s key url folder_id name [description]' % os.path.basename( sys.argv[0] ) + print('usage: %s key url folder_id name [description]' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data[ 'description' ] = sys.argv[5] except IndexError: - print "Unable to set description; using empty description in its place" + print("Unable to set description; using empty description in its place") data[ 'description' ] = '' submit( sys.argv[1], sys.argv[2], data ) diff --git a/scripts/api/library_create_library.py b/scripts/api/library_create_library.py index c915e876da8b..73be35dc8021 100755 --- a/scripts/api/library_create_library.py +++ b/scripts/api/library_create_library.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -8,7 +9,7 @@ data = {} data[ 'name' ] = sys.argv[3] except IndexError: - print 'usage: %s key url name [description] [synopsys]' % os.path.basename( sys.argv[0] ) + print('usage: %s key url name [description] [synopsys]' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data[ 'description' ] = sys.argv[4] diff --git a/scripts/api/library_upload_from_import_dir.py b/scripts/api/library_upload_from_import_dir.py index 57f6ccdc86da..f5ae2cf327ee 100755 --- a/scripts/api/library_upload_from_import_dir.py +++ b/scripts/api/library_upload_from_import_dir.py @@ -3,6 +3,7 @@ Example usage: ./library_upload_from_import_dir.py http://127.0.0.1:8080/api/libraries/dda47097d9189f15/contents Fdda47097d9189f15 auto /Users/EnisAfgan/projects/pprojects/galaxy/lib_upload_dir ? 
""" +from __future__ import print_function import os import sys @@ -17,7 +18,7 @@ data[ 'upload_option' ] = 'upload_directory' data[ 'create_type' ] = 'file' except IndexError: - print 'usage: %s key url folder_id file_type server_dir dbkey' % os.path.basename( sys.argv[0] ) + print('usage: %s key url folder_id file_type server_dir dbkey' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) submit( sys.argv[1], sys.argv[2], data ) diff --git a/scripts/api/load_data_with_metadata.py b/scripts/api/load_data_with_metadata.py index 96480f23d2ff..1c7d2e6bb5da 100755 --- a/scripts/api/load_data_with_metadata.py +++ b/scripts/api/load_data_with_metadata.py @@ -9,6 +9,7 @@ NOTE: The upload method used requires the data library filesystem upload allow_library_path_paste """ +from __future__ import print_function import argparse import json import os @@ -36,7 +37,7 @@ def load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_fi data['uuid'] = ext_meta[uuid_field] libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted=True) - print libset + print(libset) def main(api_key, api_url, in_folder, data_library, uuid_field=None): @@ -55,19 +56,19 @@ def main(api_key, api_url, in_folder, data_library, uuid_field=None): if f['name'] == "/": library_folder_id = f['id'] if not library_id or not library_folder_id: - print "Failure to configure library destination." + print("Failure to configure library destination.") sys.exit(1) if os.path.isfile(in_folder): if os.path.exists(in_folder + ".json"): fullpath = os.path.abspath(in_folder) - print "Loading", fullpath + print("Loading", fullpath) load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field) else: for fname in os.listdir(in_folder): fullpath = os.path.join(in_folder, fname) if os.path.isfile(fullpath) and os.path.exists(fullpath + ".json"): - print "Loading", fullpath + print("Loading", fullpath) load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field) if __name__ == '__main__': diff --git a/scripts/api/repair_tool_shed_repository.py b/scripts/api/repair_tool_shed_repository.py index 5316f7359af9..18c8a3e61e52 100644 --- a/scripts/api/repair_tool_shed_repository.py +++ b/scripts/api/repair_tool_shed_repository.py @@ -5,6 +5,7 @@ Here is a working example of how to use this script to repair a repository installed into Galaxy. ./repair_tool_shed_repository.py --api --local --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --revision f28d5018f9cb """ +from __future__ import print_function import argparse from common import display, submit @@ -44,7 +45,7 @@ def main( options ): url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/repair_repository_revision' % str( tool_shed_repository_id ) ) submit( options.api, url, data ) else: - print "Invalid tool_shed / name / owner / changeset_revision." + print("Invalid tool_shed / name / owner / changeset_revision.") if __name__ == '__main__': parser = argparse.ArgumentParser( description='Installation of tool shed repositories via the Galaxy API.' 
) diff --git a/scripts/api/request_type_create_from_xml.py b/scripts/api/request_type_create_from_xml.py index 162d2e8e4929..5bf98ba812fe 100755 --- a/scripts/api/request_type_create_from_xml.py +++ b/scripts/api/request_type_create_from_xml.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -11,7 +12,7 @@ data[ 'sequencer_id' ] = sys.argv[5] data[ 'xml_text' ] = open( sys.argv[6] ).read() except IndexError: - print 'usage: %s key url request_form_id sample_form_id request_type_xml_description_file [access_role_ids,]' % os.path.basename( sys.argv[0] ) + print('usage: %s key url request_form_id sample_form_id request_type_xml_description_file [access_role_ids,]' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data[ 'role_ids' ] = [ i for i in sys.argv[7].split( ',' ) if i ] diff --git a/scripts/api/requests_update_state.py b/scripts/api/requests_update_state.py index 826e38f1ecb7..6a0cb8b0c9ce 100755 --- a/scripts/api/requests_update_state.py +++ b/scripts/api/requests_update_state.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -8,7 +9,7 @@ data = {} data[ 'update_type' ] = 'request_state' except IndexError: - print 'usage: %s key url' % os.path.basename( sys.argv[0] ) + print('usage: %s key url' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) update( sys.argv[1], sys.argv[2], data, return_formatted=True ) diff --git a/scripts/api/sample_dataset_update_status.py b/scripts/api/sample_dataset_update_status.py index 3313311b13cb..e60bb90cdba3 100644 --- a/scripts/api/sample_dataset_update_status.py +++ b/scripts/api/sample_dataset_update_status.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -10,11 +11,11 @@ data[ 'sample_dataset_ids' ] = sys.argv[3].split(',') data[ 'new_status' ] = sys.argv[4] except IndexError: - print 'usage: %s key url sample_dataset_ids new_state [error msg]' % os.path.basename( sys.argv[0] ) + print('usage: %s key url sample_dataset_ids new_state [error msg]' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data[ 'error_msg' ] = sys.argv[5] except IndexError: data[ 'error_msg' ] = '' -print data +print(data) update( sys.argv[1], sys.argv[2], data, return_formatted=True ) diff --git a/scripts/api/sample_update_state.py b/scripts/api/sample_update_state.py index 2c7c09ad2d45..fc5939da1de4 100755 --- a/scripts/api/sample_update_state.py +++ b/scripts/api/sample_update_state.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -9,7 +10,7 @@ data[ 'update_type' ] = 'sample_state' data[ 'new_state' ] = sys.argv[3] except IndexError: - print 'usage: %s key url new_state [comment]' % os.path.basename( sys.argv[0] ) + print('usage: %s key url new_state [comment]' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data[ 'comment' ] = sys.argv[4] diff --git a/scripts/api/search.py b/scripts/api/search.py index 51521548c92d..3fff1bf38638 100644 --- a/scripts/api/search.py +++ b/scripts/api/search.py @@ -1,6 +1,7 @@ """ Sample script for Galaxy Search API """ +from __future__ import print_function import json import requests import sys @@ -32,26 +33,26 @@ def post(self, path, payload): rg = RemoteGalaxy(server, api_key) - print "select name, id, file_size from hda" - print rg.post("/api/search", { "query": "select name, id, file_size from hda" }) + print("select name, id, file_size from hda") + print(rg.post("/api/search", { "query": "select name, id, 
file_size from hda" })) - print "select name from hda" - print rg.post("/api/search", { "query": "select name from hda" }) + print("select name from hda") + print(rg.post("/api/search", { "query": "select name from hda" })) - print "select name, model_class from ldda" - print rg.post("/api/search", { "query": "select name, model_class from ldda" }) + print("select name, model_class from ldda") + print(rg.post("/api/search", { "query": "select name, model_class from ldda" })) - print "select * from history" - print rg.post("/api/search", { "query": "select * from history" }) + print("select * from history") + print(rg.post("/api/search", { "query": "select * from history" })) - print "select * from tool" - print rg.post("/api/search", { "query": "select * from tool" }) + print("select * from tool") + print(rg.post("/api/search", { "query": "select * from tool" })) - print "select * from workflow" - print rg.post("/api/search", { "query": "select * from workflow" }) + print("select * from workflow") + print(rg.post("/api/search", { "query": "select * from workflow" })) - print "select id, name from history where name='Unnamed history'" - print rg.post("/api/search", {"query": "select id, name from history where name='Unnamed history'"}) + print("select id, name from history where name='Unnamed history'") + print(rg.post("/api/search", {"query": "select id, name from history where name='Unnamed history'"})) - print "select * from history where name='Unnamed history'" - print rg.post("/api/search", {"query": "select * from history where name='Unnamed history'"}) + print("select * from history where name='Unnamed history'") + print(rg.post("/api/search", {"query": "select * from history where name='Unnamed history'"})) diff --git a/scripts/api/sequencer_configuration_create.py b/scripts/api/sequencer_configuration_create.py index 3b4ceb7b5b16..c21abf4d2625 100755 --- a/scripts/api/sequencer_configuration_create.py +++ b/scripts/api/sequencer_configuration_create.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -40,7 +41,7 @@ def main(): request_type_filename = sys.argv[5] email_addresses = sys.argv[6].split( ',' ) except IndexError: - print 'usage: %s key base_url request_form_xml_description_file sample_form_xml_description_file request_type_xml_description_file email_address1[,email_address2]' % os.path.basename( sys.argv[0] ) + print('usage: %s key base_url request_form_xml_description_file sample_form_xml_description_file request_type_xml_description_file email_address1[,email_address2]' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) return create_sequencer_configuration( key, base_url, request_form_filename, sample_form_filename, request_type_filename, email_addresses, return_formatted=True ) diff --git a/scripts/api/upload_to_history.py b/scripts/api/upload_to_history.py index cd98ed0c9af9..1216cf990167 100755 --- a/scripts/api/upload_to_history.py +++ b/scripts/api/upload_to_history.py @@ -2,6 +2,7 @@ """ Upload a file to the desired history. """ +from __future__ import print_function import json import os import sys @@ -9,8 +10,8 @@ try: import requests except ImportError: - print "Could not import the requests module. See http://docs.python-requests.org/en/latest/" + \ - " or install with 'pip install requests'" + print("Could not import the requests module. 
See http://docs.python-requests.org/en/latest/" + + " or install with 'pip install requests'") raise @@ -28,7 +29,7 @@ def upload_file( base_url, api_key, history_id, filepath, **kwargs ): # TODO: the following doesn't work with tools.py 'dbkey' : '?', 'file_type' : kwargs.get( 'file_type', 'auto' ), - 'ajax_upload' : u'true', + 'ajax_upload' : 'true', } payload[ 'inputs' ] = json.dumps( inputs ) @@ -41,13 +42,13 @@ def upload_file( base_url, api_key, history_id, filepath, **kwargs ): if __name__ == '__main__': if len( sys.argv ) < 5: - print "history_upload.py <api key> <galaxy base url> <history id> <filepath>\n" + \ - " (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')" + print("history_upload.py <api key> <galaxy base url> <history id> <filepath>\n" + + " (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')") sys.exit( 1 ) api_key, base_url, history_id, filepath = sys.argv[1:5] kwargs = dict([ kwarg.split('=', 1) for kwarg in sys.argv[5:]]) response = upload_file( base_url, api_key, history_id, filepath, **kwargs ) - print >> sys.stderr, response - print response.content + print(response, file=sys.stderr) + print(response.content) diff --git a/scripts/api/workflow_delete.py b/scripts/api/workflow_delete.py index 4f64f27c8cf4..cc68c93da18d 100644 --- a/scripts/api/workflow_delete.py +++ b/scripts/api/workflow_delete.py @@ -8,6 +8,7 @@ Example calls: python workflow_delete.py /api/workflows/ True """ +from __future__ import print_function import os import sys @@ -16,7 +17,7 @@ try: assert sys.argv[2] except IndexError: - print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] ) + print('usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data = {} diff --git a/scripts/api/workflow_execute.py b/scripts/api/workflow_execute.py index 6537104721d2..cd37368fccfd 100755 --- a/scripts/api/workflow_execute.py +++ b/scripts/api/workflow_execute.py @@ -5,6 +5,7 @@ python workflow_execute.py /api/workflows f2db41e1fa331b3e 'Test API History' '38=ldda=0qr350234d2d192f' python workflow_execute.py /api/workflows f2db41e1fa331b3e 'hist_id=a912e9e5d84530d4' '38=hda=03501d7626bd192f' """ +from __future__ import print_function import os import sys @@ -23,7 +24,7 @@ def main(): step, src, ds_id = v.split('=') data['ds_map'][step] = {'src': src, 'id': ds_id} except IndexError: - print 'usage: %s key url workflow_id history step=src=dataset_id' % os.path.basename(sys.argv[0]) + print('usage: %s key url workflow_id history step=src=dataset_id' % os.path.basename(sys.argv[0])) sys.exit(1) submit( sys.argv[1], sys.argv[2], data ) diff --git a/scripts/api/workflow_import.py b/scripts/api/workflow_import.py index f86bb0adb2ca..d1464668d6f9 100755 --- a/scripts/api/workflow_import.py +++ b/scripts/api/workflow_import.py @@ -4,6 +4,7 @@ Example calls: python workflow_import.py '/path/to/workflow/file [--add_to_menu]' """ +from __future__ import print_function import os import sys @@ -20,7 +21,7 @@ def main(): if len(sys.argv) > 4 and sys.argv[4] == "--add_to_menu": data['add_to_menu'] = True except IndexError: - print 'usage: %s key galaxy_url workflow_file' % os.path.basename(sys.argv[0]) + print('usage: %s key galaxy_url workflow_file' % os.path.basename(sys.argv[0])) sys.exit(1) # print display( api_key, api_base_url + "/api/workflows" ) submit( api_key, api_url, data, return_formatted=False ) diff --git a/scripts/api/workflow_import_from_file_rpark.py b/scripts/api/workflow_import_from_file_rpark.py index 1f23216821a3..9ee5a1637852 100644 ---
a/scripts/api/workflow_import_from_file_rpark.py +++ b/scripts/api/workflow_import_from_file_rpark.py @@ -3,6 +3,7 @@ python rpark_import_workflow_from_file.py 35a24ae2643785ff3d046c98ea362c7f http://localhost:8080/api/workflows/import 'spp_submodule.ga' python rpark_import_workflow_from_file.py 35a24ae2643785ff3d046c98ea362c7f http://localhost:8080/api/workflows/import 'spp_submodule.ga' """ +from __future__ import print_function import json import os import sys @@ -19,7 +20,7 @@ def openWorkflow(in_file): try: assert sys.argv[2] except IndexError: - print 'usage: %s key url [name] ' % os.path.basename( sys.argv[0] ) + print('usage: %s key url [name] ' % os.path.basename( sys.argv[0] )) sys.exit( 1 ) try: data = {} From 3b31f02d07834882a46ebcd2ff82423c2f4b5df2 Mon Sep 17 00:00:00 2001 From: Timur Shtatland Date: Sat, 25 Jun 2016 21:03:20 -0400 Subject: [PATCH 34/86] Python 3: more lib/galaxy/datatypes/converters/ lib/galaxy/datatypes/converters/lped_to_fped_converter.py lib/galaxy/datatypes/converters/lped_to_pbed_converter.py lib/galaxy/datatypes/converters/maf_to_fasta_converter.py lib/galaxy/datatypes/converters/maf_to_interval_converter.py --- .ci/py3_sources.txt | 4 ++++ lib/galaxy/datatypes/converters/lped_to_fped_converter.py | 4 ++-- lib/galaxy/datatypes/converters/lped_to_pbed_converter.py | 4 ++-- lib/galaxy/datatypes/converters/maf_to_fasta_converter.py | 2 +- .../datatypes/converters/maf_to_interval_converter.py | 8 +++++--- 5 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index a828fdbca07a..24f87a532bcd 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -11,6 +11,10 @@ lib/galaxy/datatypes/converters/gff_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bed_converter.py lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py lib/galaxy/datatypes/converters/interval_to_coverage.py +lib/galaxy/datatypes/converters/lped_to_fped_converter.py +lib/galaxy/datatypes/converters/lped_to_pbed_converter.py +lib/galaxy/datatypes/converters/maf_to_fasta_converter.py +lib/galaxy/datatypes/converters/maf_to_interval_converter.py lib/galaxy/datatypes/sequence.py lib/galaxy/datatypes/sniff.py lib/galaxy/datatypes/tabular.py diff --git a/lib/galaxy/datatypes/converters/lped_to_fped_converter.py b/lib/galaxy/datatypes/converters/lped_to_fped_converter.py index cee1284e4d67..de0c14a340bc 100644 --- a/lib/galaxy/datatypes/converters/lped_to_fped_converter.py +++ b/lib/galaxy/datatypes/converters/lped_to_fped_converter.py @@ -63,7 +63,7 @@ def rgConv(inpedfilepath, outhtmlname, outfilepath): if i == 0: lrow = row.split() try: - x = [int(x) for x in lrow[10:50]] # look for non numeric codes + [int(x) for x in lrow[10:50]] # look for non numeric codes except: dorecode = 1 if dorecode: @@ -100,7 +100,7 @@ def main(): flist = os.listdir(outfilepath) with open(outhtmlname, 'w') as f: f.write(galhtmlprefix % prog) - print '## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow()) # becomes info + print('## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow())) # becomes info f.write('
<div>## Rgenetics: http://rgenetics.org Galaxy Tools %s %s\n<ol>' % (prog, timenow())) for i, data in enumerate( flist ): f.write('<li><a href="%s">%s</a></li>
\n' % (os.path.split(data)[-1], os.path.split(data)[-1])) diff --git a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py index 28800b9cb25d..84e1c10d4796 100644 --- a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py +++ b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py @@ -68,7 +68,7 @@ def rgConv(inpedfilepath, outhtmlname, outfilepath, plink): outroot = os.path.join(outfilepath, basename) missval = getMissval(inped=pedf) if not missval: - print '### lped_to_pbed_converter.py cannot identify missing value in %s' % pedf + print('### lped_to_pbed_converter.py cannot identify missing value in %s' % pedf) missval = '0' cl = '%s --noweb --file %s --make-bed --out %s --missing-genotype %s' % (plink, inpedfilepath, outroot, missval) p = subprocess.Popen(cl, shell=True, cwd=outfilepath) @@ -100,7 +100,7 @@ def main(): with open(outhtmlname, 'w') as f: f.write(galhtmlprefix % prog) s = '## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow()) # becomes info - print s + print(s) f.write('
<div>%s\n<ol>' % (s)) for i, data in enumerate( flist ): f.write('<li><a href="%s">%s</a></li>
\n' % (os.path.split(data)[-1], os.path.split(data)[-1])) diff --git a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py index 068cf8584bc6..1d13db1ddfbe 100644 --- a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py +++ b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py @@ -26,7 +26,7 @@ def __main__(): out.write( "%s\n" % c.text ) out.write( "\n" ) out.close() - print "%i MAF blocks converted to FASTA." % ( count ) + print("%i MAF blocks converted to FASTA." % ( count )) if __name__ == "__main__": diff --git a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py index 812fdeac25f0..140d77fe4cee 100644 --- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py +++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py @@ -1,7 +1,9 @@ #!/usr/bin/env python # Dan Blankenberg -import sys +from __future__ import print_function + +import sys import bx.align.maf from galaxy.tools.util import maf_utilities @@ -24,9 +26,9 @@ def __main__(): out.write( "%s\t%i\t%i\t%s\n" % ( maf_utilities.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) ) count += 1 except Exception as e: - print >> sys.stderr, "There was a problem processing your input: %s" % e + print("There was a problem processing your input: %s" % e, file=sys.stderr) out.close() - print "%i MAF blocks converted to Genomic Intervals for species %s." % ( count, species ) + print("%i MAF blocks converted to Genomic Intervals for species %s." % ( count, species )) if __name__ == "__main__": From ef335721679618b9c53b6024610d6bc024129774 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 21:20:50 -0400 Subject: [PATCH 35/86] Python3: lib/galaxy/datatypes/dataproviders/ --- .ci/py3_sources.txt | 1 + lib/galaxy/datatypes/dataproviders/column.py | 10 +++++----- lib/galaxy/datatypes/dataproviders/line.py | 4 ++-- lib/galaxy/datatypes/util/gff_util.py | 1 + 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 24f87a532bcd..58575f95df1f 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -15,6 +15,7 @@ lib/galaxy/datatypes/converters/lped_to_fped_converter.py lib/galaxy/datatypes/converters/lped_to_pbed_converter.py lib/galaxy/datatypes/converters/maf_to_fasta_converter.py lib/galaxy/datatypes/converters/maf_to_interval_converter.py +lib/galaxy/datatypes/dataproviders/ lib/galaxy/datatypes/sequence.py lib/galaxy/datatypes/sniff.py lib/galaxy/datatypes/tabular.py diff --git a/lib/galaxy/datatypes/dataproviders/column.py b/lib/galaxy/datatypes/dataproviders/column.py index 08ee29e9e40c..d57f415026c8 100644 --- a/lib/galaxy/datatypes/dataproviders/column.py +++ b/lib/galaxy/datatypes/dataproviders/column.py @@ -3,10 +3,10 @@ is further subdivided into multiple data (e.g. columns from a line). """ -import urllib +from six.moves.urllib.parse import unquote_plus import re -import line +from .
import line _TODO = """ move ColumnarDataProvider parsers to more sensible location @@ -96,7 +96,7 @@ def __init__( self, source, indeces=None, self.column_count = len( self.column_types ) # if no indeces given, infer from column_count if not self.selected_column_indeces and self.column_count: - self.selected_column_indeces = list( xrange( self.column_count ) ) + self.selected_column_indeces = list( range( self.column_count ) ) self.deliminator = deliminator @@ -182,7 +182,7 @@ def create_string_filter( self, column, op, val ): elif 'has' == op: return lambda d: val in d[column] elif 're' == op: - val = urllib.unquote_plus( val ) + val = unquote_plus( val ) val = re.compile( val ) return lambda d: val.match( d[column] ) is not None return None @@ -258,7 +258,7 @@ def parse_columns_from_line( self, line ): # TODO: too much going on in this loop - the above should all be precomputed AMAP... all_columns = line.split( self.deliminator ) # if no indeces were passed to init, return all columns - selected_indeces = self.selected_column_indeces or list( xrange( len( all_columns ) ) ) + selected_indeces = self.selected_column_indeces or list( range( len( all_columns ) ) ) parsed_columns = [] for parser_index, column_index in enumerate( selected_indeces ): parsed_columns.append( self.parse_column_at_index( all_columns, parser_index, column_index ) ) diff --git a/lib/galaxy/datatypes/dataproviders/line.py b/lib/galaxy/datatypes/dataproviders/line.py index 62181d61afdb..7836bf2863fb 100644 --- a/lib/galaxy/datatypes/dataproviders/line.py +++ b/lib/galaxy/datatypes/dataproviders/line.py @@ -4,7 +4,7 @@ import collections import re -import base +from . import base import logging log = logging.getLogger( __name__ ) @@ -241,7 +241,7 @@ def assemble_current_block( self ): Called per block (just before providing). """ # empty block_lines and assemble block - return list( ( self.block_lines.popleft() for i in xrange( len( self.block_lines ) ) ) ) + return list( ( self.block_lines.popleft() for i in range( len( self.block_lines ) ) ) ) def filter_block( self, block ): """ diff --git a/lib/galaxy/datatypes/util/gff_util.py b/lib/galaxy/datatypes/util/gff_util.py index 68e35adf4819..2a445e03765d 100644 --- a/lib/galaxy/datatypes/util/gff_util.py +++ b/lib/galaxy/datatypes/util/gff_util.py @@ -149,6 +149,7 @@ def parse_row( self, line ): self.default_strand, fix_strand=self.fix_strand ) return interval + # For Python3 this needs to be changed to __next__() after bx-python library is ported too def next( self ): """ Returns next GFFFeature. 
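Note on the gff_util.py comment above: a class can satisfy both iterator protocols at once by aliasing, so callers keep working on Python 2 and 3 without waiting for bx-python; a sketch with hypothetical names:

    class FeatureReader(object):  # hypothetical class, pattern only
        def __init__(self, items):
            self._it = iter(items)

        def next(self):           # Python 2 iterator protocol
            return next(self._it)

        __next__ = next           # Python 3 protocol alias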
""" From 86a54452b32008b91b400aa06213fe3d394730c3 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 21:22:34 -0400 Subject: [PATCH 36/86] Revert change to callable(), it has been brought back in Python 3.2 --- test/unit/managers/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/unit/managers/base.py b/test/unit/managers/base.py index fe40a6f3acca..0e61b2f0bd8a 100644 --- a/test/unit/managers/base.py +++ b/test/unit/managers/base.py @@ -5,7 +5,6 @@ import os import imp import json -import collections from six import string_types @@ -111,7 +110,7 @@ def assertORMFilter( self, item, msg=None ): self.assertTrue( True, msg or ( 'is an orm filter: ' + str( item ) ) ) def assertFnFilter( self, item, msg=None ): - if not item or not isinstance( item, collections.Callable): + if not item or not callable( item ): self.fail( 'Not a fn filter: ' + str( type( item ) ) ) self.assertTrue( True, msg or ( 'is a fn filter: ' + str( item ) ) ) From 31c61caba371507c3ad33cffaab1e05d4c1684c1 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 25 Jun 2016 21:27:09 -0400 Subject: [PATCH 37/86] Simplify .ci/py3_sources.txt --- .ci/py3_sources.txt | 27 +-------------------------- 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 58575f95df1f..c0cf12fed16d 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -57,32 +57,7 @@ lib/tool_shed/repository_types/ lib/tool_shed/tools/ lib/tool_shed/util/ lib/tool_shed/utility_containers/ -scripts/api/common.py -scripts/api/example_watch_folder.py -scripts/api/form_create_from_xml.py -scripts/api/history_delete_history.py -scripts/api/import_library_dataset_to_history.py -scripts/api/import_workflows_from_installed_tool_shed_repository.py -scripts/api/library_create_folder.py -scripts/api/library_create_library.py -scripts/api/library_upload_from_import_dir.py -scripts/api/load_data_with_metadata.py -scripts/api/repair_tool_shed_repository.py -scripts/api/requests_update_state.py -scripts/api/request_type_create_from_xml.py -scripts/api/sample_dataset_update_status.py -scripts/api/sample_update_state.py -scripts/api/search.py -scripts/api/sequencer_configuration_create.py -scripts/api/upload_to_history.py -scripts/api/workflow_delete.py -scripts/api/workflow_execute.py -scripts/api/workflow_import_from_file_rpark.py -scripts/api/workflow_import.py - -scripts/api/data_manager_example_execute.py -scripts/api/display.py -scripts/api/workflow_execute_parameters.py +scripts/api/ scripts/auth/ scripts/cleanup_datasets/admin_cleanup_datasets.py scripts/cleanup_datasets/cleanup_datasets.py From f68a8198da85101f241f0c8d8935b06b03019ebe Mon Sep 17 00:00:00 2001 From: Junzhou Wang Date: Sat, 25 Jun 2016 21:46:52 -0400 Subject: [PATCH 38/86] Python3: scripts/cleanup_datasets scripts/data_libraries --- .ci/py3_sources.txt | 14 ++++++++ scripts/bootstrap_history.py | 36 +++++++++---------- scripts/build_toolbox.py | 13 +++---- scripts/check_galaxy.py | 15 ++++---- scripts/check_python.py | 6 ++-- scripts/cleanup_datasets/pgcleanup.py | 6 ++-- scripts/cleanup_datasets/populate_uuid.py | 12 +++---- .../remove_renamed_datasets_from_disk.py | 16 ++++----- .../rename_purged_datasets.py | 16 ++++----- .../cleanup_datasets/update_dataset_size.py | 16 ++++----- scripts/cleanup_datasets/update_metadata.py | 16 ++++----- scripts/data_libraries/build_whoosh_index.py | 9 ++--- scripts/db_shell.py | 9 +++-- scripts/drmaa_external_runner.py | 4 +-- scripts/fetch_eggs.py | 6 ++-- 15 
files changed, 108 insertions(+), 86 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index c0cf12fed16d..ac87492704dd 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -59,6 +59,20 @@ lib/tool_shed/util/ lib/tool_shed/utility_containers/ scripts/api/ scripts/auth/ +scripts/bootstrap_history.py +scripts/build_toolbox.py +scripts/check_eggs.py +scripts/check_galaxy.py +scripts/check_python.py +scripts/cleanup_datasets/pgcleanup.py +scripts/cleanup_datasets/populate_uuid.py +scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py +scripts/cleanup_datasets/rename_purged_datasets.py +scripts/cleanup_datasets/update_dataset_size.py +scripts/cleanup_datasets/update_metadata.py +scripts/data_libraries/build_whoosh_index.py +scripts/db_shell.py +scripts/drmaa_external_runner.py scripts/cleanup_datasets/admin_cleanup_datasets.py scripts/cleanup_datasets/cleanup_datasets.py test/api/test_workflows_from_yaml.py diff --git a/scripts/bootstrap_history.py b/scripts/bootstrap_history.py index f010131f461f..e2cf537ad2c7 100644 --- a/scripts/bootstrap_history.py +++ b/scripts/bootstrap_history.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # Little script to make HISTORY.rst more easy to format properly, lots TODO # pull message down and embed, use arg parse, handle multiple, etc... - +from __future__ import print_function import ast import calendar import datetime @@ -13,14 +13,14 @@ import requests except ImportError: requests = None -import urlparse import textwrap import json try: from pygithub3 import Github except ImportError: Github = None - +from six.moves.urllib.parse import urljoin +from six import string_types PROJECT_DIRECTORY = os.path.join(os.path.dirname(__file__), os.pardir) SOURCE_DIR = os.path.join(PROJECT_DIRECTORY, "lib") @@ -251,7 +251,7 @@ def commit_time(commit_hash): - api_url = urlparse.urljoin(PROJECT_API, "commits/%s" % commit_hash) + api_url = urljoin(PROJECT_API, "commits/%s" % commit_hash) req = requests.get(api_url).json() return datetime.datetime.strptime(req["commit"]["committer"]["date"], "%Y-%m-%dT%H:%M:%SZ") @@ -331,7 +331,7 @@ def check_blocking_prs(argv): release_name = argv[2] block = 0 for pr in _get_prs(release_name, state="open"): - print "WARN: Blocking PR| %s" % _pr_to_str(pr) + print("WARN: Blocking PR| %s" % _pr_to_str(pr)) block = 1 sys.exit(block) @@ -349,20 +349,20 @@ def check_blocking_issues(argv): for page in issues: for issue in page: if issue.milestone and issue.milestone.title == release_name and "Publication of Galaxy Release" not in issue.title: - print "WARN: Blocking issue| %s" % _issue_to_str(issue) + print("WARN: Blocking issue| %s" % _issue_to_str(issue)) block = 1 sys.exit(block) def _pr_to_str(pr): - if isinstance(pr, basestring): + if isinstance(pr, string_types): return pr return "PR #%s (%s) %s" % (pr.number, pr.title, pr.html_url) def _issue_to_str(pr): - if isinstance(pr, basestring): + if isinstance(pr, string_types): return pr return "Issue #%s (%s) %s" % (pr.number, pr.title, pr.html_url) @@ -463,7 +463,7 @@ def extend(from_str, line, source=history): if len(argv) > 2: message = argv[2] elif not (ident.startswith("pr") or ident.startswith("issue")): - api_url = urlparse.urljoin(PROJECT_API, "commits/%s" % ident) + api_url = urljoin(PROJECT_API, "commits/%s" % ident) if req is None: req = requests.get(api_url).json() commit = req["commit"] @@ -471,13 +471,13 @@ def extend(from_str, line, source=history): message = get_first_sentence(message) elif requests is not None and ident.startswith("pr"): 
pull_request = ident[len("pr"):] - api_url = urlparse.urljoin(PROJECT_API, "pulls/%s" % pull_request) + api_url = urljoin(PROJECT_API, "pulls/%s" % pull_request) if req is None: req = requests.get(api_url).json() message = req["title"] elif requests is not None and ident.startswith("issue"): issue = ident[len("issue"):] - api_url = urlparse.urljoin(PROJECT_API, "issues/%s" % issue) + api_url = urljoin(PROJECT_API, "issues/%s" % issue) if req is None: req = requests.get(api_url).json() message = req["title"] @@ -522,7 +522,7 @@ def extend(from_str, line, source=history): def _text_target(github, pull_request): labels = [] pr_number = None - if isinstance(pull_request, basestring): + if isinstance(pull_request, string_types): pr_number = pull_request else: pr_number = pull_request.number @@ -530,10 +530,10 @@ def _text_target(github, pull_request): try: labels = github.issues.labels.list_by_issue(int(pr_number), user=PROJECT_OWNER, repo=PROJECT_NAME) except Exception as e: - print e + print(e) is_bug = is_enhancement = is_feature = is_minor = is_major = is_merge = is_small_enhancement = False if len(labels) == 0: - print 'No labels found for %s' % pr_number + print('No labels found for %s' % pr_number) return None for label in labels: label_name = label.name.lower() @@ -555,7 +555,7 @@ def _text_target(github, pull_request): is_some_kind_of_enhancement = is_enhancement or is_feature or is_small_enhancement if not( is_bug or is_some_kind_of_enhancement or is_minor or is_merge ): - print "No kind/ or minor or merge label found for %s" % _pr_to_str(pull_request) + print("No kind/ or minor or merge label found for %s" % _pr_to_str(pull_request)) text_target = None if is_minor or is_merge: @@ -574,7 +574,7 @@ def _text_target(github, pull_request): elif is_bug: text_target = "bug" else: - print "Logic problem, cannot determine section for %s" % _pr_to_str(pull_request) + print("Logic problem, cannot determine section for %s" % _pr_to_str(pull_request)) text_target = None return text_target @@ -597,8 +597,8 @@ def _latest_release(): def _releases(): all_files = sorted(os.listdir(RELEASES_PATH)) release_note_file_pattern = re.compile(r"\d+\.\d+.rst") - release_note_files = filter(lambda f: release_note_file_pattern.match(f), all_files) - return sorted(map(lambda f: f.rstrip('.rst'), release_note_files)) + release_note_files = [f for f in all_files if release_note_file_pattern.match(f)] + return sorted([f.rstrip('.rst') for f in release_note_files]) def _get_major_version(): diff --git a/scripts/build_toolbox.py b/scripts/build_toolbox.py index 3f3a79fb54f0..3e2a69c8c9d6 100644 --- a/scripts/build_toolbox.py +++ b/scripts/build_toolbox.py @@ -1,3 +1,4 @@ +from __future__ import print_function import os from xml.etree import ElementTree as ET @@ -21,7 +22,7 @@ def getfilenamelist(startdir): try: doc = ET.parse(fullfn) except: - print "An OOPS on", fullfn + print("An OOPS on", fullfn) raise rootelement = doc.getroot() # Only interpret those 'tool' XML files that have @@ -30,7 +31,7 @@ def getfilenamelist(startdir): if rootelement.findall('toolboxposition'): filenamelist.append(fullfn) else: - print "DBG> tool config does not have a
      :", fullfn + print("DBG> tool config does not have a
      :", fullfn) return filenamelist @@ -58,7 +59,7 @@ def add(self, toolelement, toolboxpositionelement): self.tools[("%05d-%s" % (sectionorder, section), label, order, section)].append(toolelement) def addElementsTo(self, rootelement): - toolkeys = self.tools.keys() + toolkeys = list(self.tools.keys()) toolkeys.sort() # Initialize the loop: IDs to zero, current section and label to '' @@ -139,13 +140,13 @@ def scanfiles(filenamelist): tagarray.append(tag.text) attrib['tags'] = ",".join(tagarray) else: - print "DBG> No tags in", fn + print("DBG> No tags in", fn) # Build the tool element newtoolelement = ET.Element('tool', attrib) toolboxpositionelements = toolelement.findall('toolboxposition') if not toolboxpositionelements: - print "DBG> %s has no toolboxposition" % fn + print("DBG> %s has no toolboxposition" % fn) else: for toolboxpositionelement in toolboxpositionelements: toolbox.add(newtoolelement, toolboxpositionelement) @@ -164,7 +165,7 @@ def assemble(): toolbox.addElementsTo(toolboxelement) - print prettify(toolboxelement) + print(prettify(toolboxelement)) if __name__ == "__main__": assemble() diff --git a/scripts/check_galaxy.py b/scripts/check_galaxy.py index 2d4359f9067f..e0eebc0f99d1 100755 --- a/scripts/check_galaxy.py +++ b/scripts/check_galaxy.py @@ -3,6 +3,7 @@ check_galaxy can be run by hand, although it is meant to run from cron via the check_galaxy.sh script in Galaxy's cron/ directory. """ +from __future__ import print_function import filecmp import formatter import getopt @@ -55,14 +56,14 @@ def usage(): try: opts, args = getopt.getopt( sys.argv[1:], 'n' ) except getopt.GetoptError as e: - print str(e) + print(str(e)) usage() if len( args ) < 1: usage() server = args[0] if server.endswith(".g2.bx.psu.edu"): if debug: - print "Checking a PSU Galaxy server, using maint file" + print("Checking a PSU Galaxy server, using maint file") maint = "/errordocument/502/%s/maint" % args[0].split('.', 1)[0] else: maint = None @@ -70,7 +71,7 @@ def usage(): for o, a in opts: if o == "-n": if debug: - print "Specified -n, will create a new history" + print("Specified -n, will create a new history") new_history = True else: usage() @@ -78,7 +79,7 @@ def usage(): # state information var_dir = os.path.join( os.path.expanduser('~'), ".check_galaxy", server ) if not os.access( var_dir, os.F_OK ): - os.makedirs( var_dir, 0700 ) + os.makedirs( var_dir, 0o700 ) # get user/pass login_file = os.path.join( var_dir, "login" ) @@ -137,7 +138,7 @@ def reset(self): p = didParser() p.feed(tc.browser.get_html()) if len(p.dids) > 0: - print "Remaining datasets ids:", " ".join( p.dids ) + print("Remaining datasets ids:", " ".join( p.dids )) raise Exception("History still contains datasets after attempting to delete them") if new_history: self.get("/history/delete_current") @@ -363,7 +364,7 @@ def handle_data(self, data): def dprint(str): if debug: - print str + print(str) # do stuff here if __name__ == "__main__": @@ -379,7 +380,7 @@ def dprint(str): dprint("not logged in... 
logging in") b.login(username, password) - for tool, params in tools.iteritems(): + for tool, params in tools.items(): check_file = "" diff --git a/scripts/check_python.py b/scripts/check_python.py index 3e05fdee4a00..c5d245198a06 100644 --- a/scripts/check_python.py +++ b/scripts/check_python.py @@ -2,7 +2,7 @@ If the current installed python version is not 2.7, prints an error message to stderr and returns 1 """ - +from __future__ import print_function import sys msg = """ERROR: Your Python version is: %s @@ -16,13 +16,13 @@ def check_python(): try: assert sys.version_info[:2] == ( 2, 7 ) except AssertionError: - print >>sys.stderr, msg + print(msg, file=sys.stderr) raise if __name__ == '__main__': rval = 0 try: check_python() - except StandardError: + except Exception: rval = 1 sys.exit( rval ) diff --git a/scripts/cleanup_datasets/pgcleanup.py b/scripts/cleanup_datasets/pgcleanup.py index ab8ca7c32504..39207f7ada28 100755 --- a/scripts/cleanup_datasets/pgcleanup.py +++ b/scripts/cleanup_datasets/pgcleanup.py @@ -4,14 +4,14 @@ bypassing the Galaxy model and operating directly on the database. PostgreSQL 9.1 or greater is required. """ - +from __future__ import print_function import datetime import inspect import logging import os import shutil import sys -from ConfigParser import ConfigParser +from configparser import ConfigParser from optparse import OptionParser galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) @@ -76,7 +76,7 @@ def __parse_args(self): self.options.sequence = [ x.strip() for x in self.options.sequence.split(',') ] if self.options.sequence == ['']: - print "Error: At least one action must be specified in the action sequence\n" + print("Error: At least one action must be specified in the action sequence\n") parser.print_help() sys.exit(0) diff --git a/scripts/cleanup_datasets/populate_uuid.py b/scripts/cleanup_datasets/populate_uuid.py index 9a7c2034c0ff..dd1e64ef3a80 100755 --- a/scripts/cleanup_datasets/populate_uuid.py +++ b/scripts/cleanup_datasets/populate_uuid.py @@ -6,7 +6,7 @@ Going forward, these ids will be generated for all new datasets. This script fixes datasets that were generated before the change. """ - +from __future__ import print_function import sys import uuid @@ -17,13 +17,13 @@ def usage(prog): - print "usage: %s galaxy.ini" % prog - print """ + print("usage: %s galaxy.ini" % prog) + print(""" Populates blank uuid fields in datasets with randomly generated values. Going forward, these ids will be generated for all new datasets. This script fixes datasets that were generated before the change. 
- """ + """) def main(): @@ -38,13 +38,13 @@ def main(): for row in model.context.query( model.Dataset ): if row.uuid is None: row.uuid = uuid.uuid4() - print "Setting dataset:", row.id, " UUID to ", row.uuid + print("Setting dataset:", row.id, " UUID to ", row.uuid) model.context.flush() for row in model.context.query( model.Workflow ): if row.uuid is None: row.uuid = uuid.uuid4() - print "Setting Workflow:", row.id, " UUID to ", row.uuid + print("Setting Workflow:", row.id, " UUID to ", row.uuid) model.context.flush() diff --git a/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py b/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py index 2132a130f8d2..2967dfbf9720 100755 --- a/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py +++ b/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py @@ -3,7 +3,7 @@ Removes a dataset file ( which was first renamed by appending _purged to the file name ) from disk. Usage: python remove_renamed_datasets_from_disk.py renamed.log """ - +from __future__ import print_function import os import sys @@ -11,15 +11,15 @@ def usage(prog): - print "usage: %s file" % prog - print """ + print("usage: %s file" % prog) + print(""" Removes a set of files from disk. The input file should contain a list of files to be deleted, one per line. The full path must be specified and must begin with /var/opt/galaxy. A log of files deleted is created in a file with the same name as that input but with .removed.log appended. - """ + """) def main(): @@ -30,7 +30,7 @@ def main(): outfile = infile + ".removed.log" out = open( outfile, 'w' ) - print >> out, "# The following renamed datasets have been removed from disk" + print("# The following renamed datasets have been removed from disk", file=out) i = 0 removed_files = 0 for i, line in enumerate( open( infile ) ): @@ -38,11 +38,11 @@ def main(): if line and line.startswith( '/var/opt/galaxy' ): try: os.unlink( line ) - print >> out, line + print(line, file=out) removed_files += 1 except Exception as exc: - print >> out, "# Error, exception " + str( exc ) + " caught attempting to remove " + line - print >> out, "# Removed " + str( removed_files ) + " files" + print("# Error, exception " + str( exc ) + " caught attempting to remove " + line, file=out) + print("# Removed " + str( removed_files ) + " files", file=out) if __name__ == "__main__": diff --git a/scripts/cleanup_datasets/rename_purged_datasets.py b/scripts/cleanup_datasets/rename_purged_datasets.py index a14d4c1fdfe4..b8fa75b4b906 100755 --- a/scripts/cleanup_datasets/rename_purged_datasets.py +++ b/scripts/cleanup_datasets/rename_purged_datasets.py @@ -3,7 +3,7 @@ Renames a dataset file by appending _purged to the file name so that it can later be removed from disk. Usage: python rename_purged_datasets.py purge.log """ - +from __future__ import print_function import os import sys @@ -11,8 +11,8 @@ def usage(prog): - print "usage: %s file" % prog - print """ + print("usage: %s file" % prog) + print(""" Marks a set of files as purged and renames them. The input file should contain a list of files to be purged, one per line. The full path must be specified and must begin with /var/opt/galaxy. @@ -20,7 +20,7 @@ def usage(prog): input but with _purged appended. The resulting files can finally be removed from disk with remove_renamed_datasets_from_disk.py, by supplying it with a list of them. 
- """ + """) def main(): @@ -31,7 +31,7 @@ def main(): outfile = infile + ".renamed.log" out = open( outfile, 'w' ) - print >> out, "# The following renamed datasets can be removed from disk" + print("# The following renamed datasets can be removed from disk", file=out) i = 0 renamed_files = 0 for i, line in enumerate( open( infile ) ): @@ -40,11 +40,11 @@ def main(): try: purged_filename = line + "_purged" os.rename( line, purged_filename ) - print >> out, purged_filename + print(purged_filename, file=out) renamed_files += 1 except Exception as exc: - print >> out, "# Error, exception " + str( exc ) + " caught attempting to rename " + purged_filename - print >> out, "# Renamed " + str( renamed_files ) + " files" + print("# Error, exception " + str( exc ) + " caught attempting to rename " + purged_filename, file=out) + print("# Renamed " + str( renamed_files ) + " files", file=out) if __name__ == "__main__": main() diff --git a/scripts/cleanup_datasets/update_dataset_size.py b/scripts/cleanup_datasets/update_dataset_size.py index d36694e4fa30..9f71deafd6b7 100755 --- a/scripts/cleanup_datasets/update_dataset_size.py +++ b/scripts/cleanup_datasets/update_dataset_size.py @@ -3,22 +3,22 @@ Updates dataset.size column. Remember to backup your database before running. """ - -import ConfigParser +from __future__ import print_function import os import sys import galaxy.app +from six.moves import configparser assert sys.version_info[:2] >= ( 2, 4 ) def usage(prog): - print "usage: %s galaxy.ini" % prog - print """ + print("usage: %s galaxy.ini" % prog) + print(""" Updates the dataset.size column. Users are advised to backup the database before running. - """ + """) def main(): @@ -26,7 +26,7 @@ def main(): usage(sys.argv[0]) sys.exit() ini_file = sys.argv.pop(1) - conf_parser = ConfigParser.ConfigParser( {'here': os.getcwd()} ) + conf_parser = configparser.ConfigParser( {'here': os.getcwd()} ) conf_parser.read( ini_file ) configuration = {} for key, value in conf_parser.items( "app:main" ): @@ -34,13 +34,13 @@ def main(): app = galaxy.app.UniverseApplication( global_conf=ini_file, **configuration ) # Step through Datasets, determining size on disk for each. - print "Determining the size of each dataset..." + print("Determining the size of each dataset...") for row in app.model.Dataset.table.select().execute(): purged = app.model.Dataset.get( row.id ).purged file_size = app.model.Dataset.get( row.id ).file_size if file_size is None and not purged: size_on_disk = app.model.Dataset.get( row.id ).get_size() - print "Updating Dataset.%d with file_size: %d" % ( row.id, size_on_disk ) + print("Updating Dataset.%d with file_size: %d" % ( row.id, size_on_disk )) app.model.Dataset.table.update( app.model.Dataset.table.c.id == row.id ).execute( file_size=size_on_disk ) app.shutdown() sys.exit(0) diff --git a/scripts/cleanup_datasets/update_metadata.py b/scripts/cleanup_datasets/update_metadata.py index 3911378d35b3..73b1bd58174a 100755 --- a/scripts/cleanup_datasets/update_metadata.py +++ b/scripts/cleanup_datasets/update_metadata.py @@ -5,10 +5,10 @@ Remember to backup your database before running. 
""" - -import ConfigParser +from __future__ import print_function import os import sys +from six.moves import configparser sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib'))) @@ -19,12 +19,12 @@ def usage(prog): - print "usage: %s galaxy.ini" % prog - print """ + print("usage: %s galaxy.ini" % prog) + print(""" Updates the metadata in the database to match rev 1981. Remember to backup your database before running. - """ + """) def main(): @@ -32,7 +32,7 @@ def main(): usage(sys.argv[0]) sys.exit() ini_file = sys.argv.pop(1) - conf_parser = ConfigParser.ConfigParser({'here': os.getcwd()}) + conf_parser = configparser.ConfigParser({'here': os.getcwd()}) conf_parser.read(ini_file) configuration = {} for key, value in conf_parser.items("app:main"): @@ -40,11 +40,11 @@ def main(): app = galaxy.app.UniverseApplication( global_conf=ini_file, **configuration ) # Search out tabular datatypes (and subclasses) and initialize metadata - print "Seeking out tabular based files and initializing metadata" + print("Seeking out tabular based files and initializing metadata") for row in app.model.Dataset.table.select().execute(): data = app.model.Dataset.get(row.id) if issubclass(type(data.datatype), type(app.datatypes_registry.get_datatype_by_extension('tabular'))): - print row.id, data.extension + print(row.id, data.extension) # Call meta_data for all tabular files # special case interval type where we do not want to overwrite chr, start, end, etc assignments if issubclass(type(data.datatype), type(app.datatypes_registry.get_datatype_by_extension('interval'))): diff --git a/scripts/data_libraries/build_whoosh_index.py b/scripts/data_libraries/build_whoosh_index.py index 29f2640d5e71..4422e881126c 100644 --- a/scripts/data_libraries/build_whoosh_index.py +++ b/scripts/data_libraries/build_whoosh_index.py @@ -8,7 +8,8 @@ Run from the ~/scripts/data_libraries directory: %sh build_whoosh_index.sh """ -import ConfigParser +from __future__ import print_function +from six.moves import configparser import os import sys @@ -34,7 +35,7 @@ def build_index( sa_session, whoosh_index_dir ): def to_unicode( a_basestr ): if type( a_basestr ) is str: - return unicode( a_basestr, 'utf-8' ) + return str( a_basestr, 'utf-8' ) else: return a_basestr lddas_indexed = 0 @@ -46,7 +47,7 @@ def to_unicode( a_basestr ): message=to_unicode( message ) ) lddas_indexed += 1 writer.commit() - print "Number of active library datasets indexed: ", lddas_indexed + print("Number of active library datasets indexed: ", lddas_indexed) def get_lddas( sa_session ): @@ -67,7 +68,7 @@ def get_lddas( sa_session ): def get_sa_session_and_needed_config_settings( ini_file ): - conf_parser = ConfigParser.ConfigParser( { 'here': os.getcwd() } ) + conf_parser = configparser.ConfigParser( { 'here': os.getcwd() } ) conf_parser.read( ini_file ) kwds = dict() for key, value in conf_parser.items( "app:main" ): diff --git a/scripts/db_shell.py b/scripts/db_shell.py index 2914a882fb42..256fc745235d 100644 --- a/scripts/db_shell.py +++ b/scripts/db_shell.py @@ -10,10 +10,15 @@ # You can also use this script as a library, for instance see https://gist.github.com/1979583 # TODO: This script overlaps a lot with manage_db.py and create_db.py, # these should maybe be refactored to remove duplication. 
+from __future__ import print_function import datetime import decimal import os.path import sys +from six import string_types, PY3 +if PY3: + long = int + # Setup DB scripting environment from sqlalchemy import * # noqa @@ -73,7 +78,7 @@ def render_literal_value(self, value, type_): of the DBAPI. """ - if isinstance(value, basestring): + if isinstance(value, string_types): value = value.replace("'", "''") return "'%s'" % value elif value is None: @@ -91,4 +96,4 @@ def render_literal_value(self, value, type_): ) compiler = LiteralCompiler(dialect, statement) - print compiler.process(statement) + print(compiler.process(statement)) diff --git a/scripts/drmaa_external_runner.py b/scripts/drmaa_external_runner.py index 1311348fbad6..408ad0210341 100755 --- a/scripts/drmaa_external_runner.py +++ b/scripts/drmaa_external_runner.py @@ -5,7 +5,7 @@ defining any or all of the following: args, remoteCommand, outputPath, errorPath, nativeSpecification, name, email, project """ - +from __future__ import print_function import errno import json import os @@ -129,7 +129,7 @@ def main(): s.exit() # Print the Job-ID and exit. Galaxy will pick it up from there. - print jobId + print(jobId) if __name__ == "__main__": main() diff --git a/scripts/fetch_eggs.py b/scripts/fetch_eggs.py index d3ee94c7a1db..5236a7332660 100755 --- a/scripts/fetch_eggs.py +++ b/scripts/fetch_eggs.py @@ -1,5 +1,5 @@ #!/usr/bin/env python - +from __future__ import print_function from os import pardir from os.path import join, abspath, dirname from sys import exit @@ -27,6 +27,6 @@ galaxy = abspath(join(dirname(__file__), pardir)) venv = join(galaxy, '.venv') -print msg.format(dir=abspath(join(dirname(__file__), pardir)), - venv=venv) +print(msg.format(dir=abspath(join(dirname(__file__), pardir)), + venv=venv)) exit(1) From 4ea881caa21602c5562224fa268ff87788eee0dd Mon Sep 17 00:00:00 2001 From: Eric Rasche Date: Sun, 26 Jun 2016 02:44:54 +0000 Subject: [PATCH 39/86] Fix old bug in non-allowed_images.yml GIEs --- lib/galaxy/web/base/interactive_environments.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/web/base/interactive_environments.py b/lib/galaxy/web/base/interactive_environments.py index 235e70866686..0ee921b4ca56 100644 --- a/lib/galaxy/web/base/interactive_environments.py +++ b/lib/galaxy/web/base/interactive_environments.py @@ -86,8 +86,8 @@ def load_allowed_images(self): # If we don't have an allowed images, then we fall back to image # name specified in the .ini file try: - self.allowed_images = [self.attr.viz_config.image] - self.default_image = self.attr.viz_config.image + self.allowed_images = [self.attr.viz_config.get('docker', 'image')] + self.default_image = self.attr.viz_config.get('docker', 'image') return except AttributeError: raise Exception("[{0}] Could not find allowed_images.yml, or image tag in {0}.ini file for ".format(self.attr.viz_id)) From 440c9f66d73baf83bf8dd277608dfb55358edfe3 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 26 Jun 2016 00:23:29 -0400 Subject: [PATCH 40/86] Python3: lib/galaxy/managers/ --- .ci/py3_sources.txt | 1 + lib/galaxy/managers/base.py | 11 ++++++----- lib/galaxy/managers/context.py | 7 ++++--- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index ac87492704dd..13c3bc80e92a 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -25,6 +25,7 @@ lib/galaxy/exceptions/ lib/galaxy/external_services/ lib/galaxy/forms/ lib/galaxy/jobs/ +lib/galaxy/managers/ 
lib/galaxy/objectstore/ lib/galaxy/openid/ lib/galaxy/quota/ diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py index 50dbd4945a6b..6150219ee6f2 100644 --- a/lib/galaxy/managers/base.py +++ b/lib/galaxy/managers/base.py @@ -25,16 +25,17 @@ # instead of the three separate classes. With no 'apparent' perfect scheme # I'm opting to just keep them separate. import datetime +import logging import re -import sqlalchemy import routes +import sqlalchemy +from six import string_types from galaxy import exceptions from galaxy import model from galaxy.model import tool_shed_install -import logging log = logging.getLogger( __name__ ) @@ -375,7 +376,7 @@ def _apply_fn_filters_gen( self, items, filters ): """ # cpu-expensive for item in items: - filter_results = map( lambda f: f( item ), filters ) + filter_results = [f( item ) for f in filters] if all( filter_results ): yield item @@ -837,7 +838,7 @@ def type( self, key, val, types ): # validators for primitives and compounds of primitives def basestring( self, key, val ): - return self.type( key, val, basestring ) + return self.type( key, val, string_types ) def bool( self, key, val ): return self.type( key, val, bool ) @@ -849,7 +850,7 @@ def nullable_basestring( self, key, val ): """ Must be a basestring or None. """ - return self.type( key, val, ( basestring, type( None ) ) ) + return self.type( key, val, ( string_types, type( None ) ) ) def int_range( self, key, val, min=None, max=None ): """ diff --git a/lib/galaxy/managers/context.py b/lib/galaxy/managers/context.py index 92b57d851079..aaa192cb4855 100644 --- a/lib/galaxy/managers/context.py +++ b/lib/galaxy/managers/context.py @@ -1,9 +1,10 @@ """ Mixins for transaction-like objects. """ - -from json import dumps import string +from json import dumps + +from six import text_type from galaxy.util import bunch @@ -20,7 +21,7 @@ def log_action( self, user=None, action=None, context=None, params=None): Application-level logging of user actions. 
""" if self.app.config.log_actions: - action = self.app.model.UserAction(action=action, context=context, params=unicode( dumps( params ) ) ) + action = self.app.model.UserAction(action=action, context=context, params=text_type( dumps( params ) ) ) try: if user: action.user = user From cd134239535a94d5a2f4f7822f7c29e79208c5d6 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 26 Jun 2016 02:43:57 -0400 Subject: [PATCH 41/86] Python3: small fixes to previous commits --- .../datatypes/converters/interval_to_coverage.py | 2 +- scripts/bootstrap_history.py | 10 ++++++---- scripts/cleanup_datasets/pgcleanup.py | 4 +++- scripts/data_libraries/build_whoosh_index.py | 9 ++++++--- scripts/db_shell.py | 8 +++++--- test/api/test_workflows.py | 16 ++++++++-------- test/shed_functional/base/twilltestcase.py | 4 ++-- 7 files changed, 31 insertions(+), 22 deletions(-) diff --git a/lib/galaxy/datatypes/converters/interval_to_coverage.py b/lib/galaxy/datatypes/converters/interval_to_coverage.py index 11727cc4f622..93042b62bb32 100644 --- a/lib/galaxy/datatypes/converters/interval_to_coverage.py +++ b/lib/galaxy/datatypes/converters/interval_to_coverage.py @@ -133,7 +133,7 @@ def close(self): temp_file = tempfile.NamedTemporaryFile(mode="r") environ['LC_ALL'] = 'POSIX' commandline = "sort -f -n -k %d -k %d -k %d -o %s %s" % (chr_col_1 + 1, start_col_1 + 1, end_col_1 + 1, temp_file.name, in_fname) - errorcode, stdout = subprocess.getstatusoutput(commandline) + errorcode, stdout = subprocess.check_call(commandline, shell=True) coverage = CoverageWriter( out_stream=open(out_fname, "a"), chromCol=chr_col_2, positionCol=position_col_2, diff --git a/scripts/bootstrap_history.py b/scripts/bootstrap_history.py index e2cf537ad2c7..6c15365b3aef 100644 --- a/scripts/bootstrap_history.py +++ b/scripts/bootstrap_history.py @@ -2,25 +2,27 @@ # Little script to make HISTORY.rst more easy to format properly, lots TODO # pull message down and embed, use arg parse, handle multiple, etc... from __future__ import print_function + import ast import calendar import datetime +import json import os import re import string import sys +import textwrap + try: import requests except ImportError: requests = None -import textwrap -import json try: from pygithub3 import Github except ImportError: Github = None -from six.moves.urllib.parse import urljoin from six import string_types +from six.moves.urllib.parse import urljoin PROJECT_DIRECTORY = os.path.join(os.path.dirname(__file__), os.pardir) SOURCE_DIR = os.path.join(PROJECT_DIRECTORY, "lib") @@ -598,7 +600,7 @@ def _releases(): all_files = sorted(os.listdir(RELEASES_PATH)) release_note_file_pattern = re.compile(r"\d+\.\d+.rst") release_note_files = [f for f in all_files if release_note_file_pattern.match(f)] - return sorted([f.rstrip('.rst') for f in release_note_files]) + return sorted(f.rstrip('.rst') for f in release_note_files) def _get_major_version(): diff --git a/scripts/cleanup_datasets/pgcleanup.py b/scripts/cleanup_datasets/pgcleanup.py index 39207f7ada28..4d3fc3b66ddc 100755 --- a/scripts/cleanup_datasets/pgcleanup.py +++ b/scripts/cleanup_datasets/pgcleanup.py @@ -5,15 +5,17 @@ PostgreSQL 9.1 or greater is required. 
""" from __future__ import print_function + import datetime import inspect import logging import os import shutil import sys -from configparser import ConfigParser from optparse import OptionParser +from six.moves.configparser import ConfigParser + galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) sys.path.insert(1, os.path.join(galaxy_root, 'lib')) diff --git a/scripts/data_libraries/build_whoosh_index.py b/scripts/data_libraries/build_whoosh_index.py index 4422e881126c..2f8a59582377 100644 --- a/scripts/data_libraries/build_whoosh_index.py +++ b/scripts/data_libraries/build_whoosh_index.py @@ -9,10 +9,13 @@ %sh build_whoosh_index.sh """ from __future__ import print_function -from six.moves import configparser + import os import sys +from six import text_type +from six.moves import configparser + sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib'))) # Whoosh is compatible with Python 2.5+ Try to import Whoosh and set flag to indicate whether search is enabled. @@ -34,8 +37,8 @@ def build_index( sa_session, whoosh_index_dir ): writer = index.writer() def to_unicode( a_basestr ): - if type( a_basestr ) is str: - return str( a_basestr, 'utf-8' ) + if not isinstance(a_basestr, text_type): + return text_type( a_basestr, 'utf-8' ) else: return a_basestr lddas_indexed = 0 diff --git a/scripts/db_shell.py b/scripts/db_shell.py index 256fc745235d..e7762c82b4f1 100644 --- a/scripts/db_shell.py +++ b/scripts/db_shell.py @@ -11,14 +11,13 @@ # TODO: This script overlaps a lot with manage_db.py and create_db.py, # these should maybe be refactored to remove duplication. from __future__ import print_function + import datetime import decimal import os.path import sys -from six import string_types, PY3 -if PY3: - long = int +from six import PY3, string_types # Setup DB scripting environment from sqlalchemy import * # noqa @@ -31,6 +30,9 @@ from galaxy.model.mapping import init from galaxy.model.orm.scripts import get_config +if PY3: + long = int + db_url = get_config( sys.argv )['db_url'] sa_session = init( '/tmp/', db_url ).context diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py index ecf2caa6779f..a6a9fcde2aa5 100644 --- a/test/api/test_workflows.py +++ b/test/api/test_workflows.py @@ -377,7 +377,7 @@ def tweak_step(step): assert step_dict['position']['left'] != 1 step_dict['position'] = {'top': 1, 'left': 1} - list(map(tweak_step, iter(steps.items()))) + map(tweak_step, steps.items()) update(workflow_content) @@ -388,7 +388,7 @@ def check_step(step): assert step_dict['position']['left'] == 1 updated_workflow_content = self._download_workflow(workflow_id) - list(map(check_step, iter(updated_workflow_content['steps'].items()))) + map(check_step, updated_workflow_content['steps'].items()) # Re-update against original worklfow... update(original_workflow) @@ -396,7 +396,7 @@ def check_step(step): updated_workflow_content = self._download_workflow(workflow_id) # Make sure the positions have been updated. 
-            list(map(tweak_step, iter(updated_workflow_content['steps'].items())))
+            map(tweak_step, updated_workflow_content['steps'].items())
 
     def test_update_no_tool_id( self ):
         workflow_object = self.workflow_populator.load_workflow( name="test_import" )
@@ -448,7 +448,7 @@ def test_import_annotations( self ):
         other_id = other_import_response.json()["id"]
         imported_workflow = self._show_workflow( other_id )
         assert imported_workflow["annotation"] == "simple workflow"
-        step_annotations = set([step["annotation"] for step in imported_workflow["steps"].values()])
+        step_annotations = set(step["annotation"] for step in imported_workflow["steps"].values())
         assert "input1 description" in step_annotations
 
     def test_import_subworkflows( self ):
@@ -1254,7 +1254,7 @@ def test_run_with_delayed_runtime_pja( self ):
 """)
         downloaded_workflow = self._download_workflow( workflow_id )
         print(downloaded_workflow)
-        uuid_dict = dict( [( int( index_step[0] ), index_step[1]["uuid"] ) for index_step in iter(downloaded_workflow["steps"].items())] )
+        uuid_dict = dict((int(index), step["uuid"]) for index, step in downloaded_workflow["steps"].items())
         history_id = self.dataset_populator.new_history()
         hda = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
         self.dataset_populator.wait_for_history( history_id )
@@ -1523,15 +1523,15 @@ def _random_lines_steps( self, workflow_request ):
         workflow_summary_response = self._get( "workflows/%s" % workflow_request[ "workflow_id" ] )
         self._assert_status_code_is( workflow_summary_response, 200 )
         steps = workflow_summary_response.json()[ "steps" ]
-        return sorted( [step for step in list(steps.values()) if step["tool_id"] == "random_lines1"], key=lambda step: step["id"] )
+        return sorted( (step for step in steps.values() if step["tool_id"] == "random_lines1"), key=lambda step: step["id"] )
 
     def _setup_random_x2_workflow( self, name ):
         workflow = self.workflow_populator.load_random_x2_workflow( name )
         uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
         workflow_inputs = self._workflow_inputs( uploaded_workflow_id )
-        key = list(workflow_inputs.keys())[ 0 ]
+        key = next(iter(workflow_inputs.keys()))
        history_id = self.dataset_populator.new_history()
-        ten_lines = "\n".join( map( str, list(range( 10)) ) )
+        ten_lines = "\n".join( str(_) for _ in range(10) )
         hda1 = self.dataset_populator.new_dataset( history_id, content=ten_lines )
         workflow_request = dict(
             history="hist_id=%s" % history_id,
diff --git a/test/shed_functional/base/twilltestcase.py b/test/shed_functional/base/twilltestcase.py
index 83598a69f719..9071a3c97ac9 100644
--- a/test/shed_functional/base/twilltestcase.py
+++ b/test/shed_functional/base/twilltestcase.py
@@ -6,11 +6,11 @@
 import tarfile
 import tempfile
 import time
-from six.moves.urllib.parse import urlencode, quote_plus
 from json import loads
 
 import twill.commands as tc
 from mercurial import commands, hg, ui
+from six.moves.urllib.parse import quote_plus, urlencode
 
 import galaxy.model.tool_shed_install as galaxy_model
 import galaxy.util
@@ -1215,7 +1215,7 @@ def set_form_value( self, form, kwd, field_name, field_value ):
                     kwd[ field_name ] = str( field_value )
         else:
             if field_name in kwd:
-                log.debug( 'No field %s in form %s, discarding from return value.' % ( str( controls ), str( form_id ) ) )
+                log.debug( 'No field %s in form %s, discarding from return value.', field_name, form_id )
                 del( kwd[ field_name ] )
         return kwd

From 65a937965a22c46922f9b731475561e38809e588 Mon Sep 17 00:00:00 2001
From: Pablo Moreno
Date: Sun, 26 Jun 2016 08:21:24 +0100
Subject: [PATCH 42/86] Improves handling of Kubernetes logs

---
 lib/galaxy/jobs/runners/kubernetes.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py
index bc69aa86edb8..aea9c8f45f84 100644
--- a/lib/galaxy/jobs/runners/kubernetes.py
+++ b/lib/galaxy/jobs/runners/kubernetes.py
@@ -4,6 +4,8 @@
 
 import logging
 
+from pykube.exceptions import HTTPError
+
 from galaxy import model
 from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
 from os import environ as os_environ
@@ -269,10 +271,14 @@ def __produce_log_file(self, job_state):
         pod_r = Pod.objects(self._pykube_api).filter(selector="app=" + job_state.job_id)
         logs = ""
         for pod_obj in pod_r.response['items']:
-            pod = Pod(self._pykube_api, pod_obj)
-            logs += "\n\n==== Pod " + pod.name + " log start ====\n\n"
-            logs += pod.logs(timestamps=True)
-            logs += "\n\n==== Pod " + pod.name + " log end ===="
+            try:
+                pod = Pod(self._pykube_api, pod_obj)
+                logs += "\n\n==== Pod " + pod.name + " log start ====\n\n"
+                logs += pod.logs(timestamps=True)
+                logs += "\n\n==== Pod " + pod.name + " log end ===="
+            except Exception as detail:
+                log.info("Could not write pods "+job_state.job_id+" log file due to HTTPError "+str(detail))
+
         logs_file_path = job_state.output_file
         logs_file = open(logs_file_path, mode="w")
         if isinstance(logs, text_type):

From e8bf2df21fe7fb7405ea283a41eae22e6d58509f Mon Sep 17 00:00:00 2001
From: Pablo Moreno
Date: Sun, 26 Jun 2016 08:26:15 +0100
Subject: [PATCH 43/86] Corrects usage of pods retrials parameters set in runner and container destinations.
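
The retry limit is resolved with a simple precedence: the runner-wide
"k8s_pod_retrials" setting is used first, a destination-level
"max_pod_retrials" overrides it, and both fall back to a default of 1 when
unset. A minimal sketch of the lookup this patch introduces (illustrative
only, not the verbatim runner code):

    def resolve_max_pod_retrials(runner_params, destination_params):
        # Start from the default, then apply the runner-wide setting,
        # then the per-destination override.
        max_pod_retrials = 1
        if 'k8s_pod_retrials' in runner_params:
            max_pod_retrials = int(runner_params['k8s_pod_retrials'])
        if 'max_pod_retrials' in destination_params:
            max_pod_retrials = int(destination_params['max_pod_retrials'])
        return max_pod_retrials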
---
 lib/galaxy/jobs/runners/kubernetes.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py
index aea9c8f45f84..2bb058e8dc6c 100644
--- a/lib/galaxy/jobs/runners/kubernetes.py
+++ b/lib/galaxy/jobs/runners/kubernetes.py
@@ -211,9 +211,17 @@ def check_watched_item(self, job_state):
         jobs = Job.objects(self._pykube_api).filter(selector="app=" + job_state.job_id)
         if len(jobs.response['items']) == 1:
             job = Job(self._pykube_api, jobs.response['items'][0])
+            job_destination = job_state.job_wrapper.job_destination
             succeeded = 0
             active = 0
             failed = 0
+
+            max_pod_retrials = 1
+            if 'k8s_pod_retrials' in self.runner_params:
+                max_pod_retrials = int(self.runner_params['k8s_pod_retrials'])
+            if 'max_pod_retrials' in job_destination.params:
+                max_pod_retrials = int(job_destination.params['max_pod_retrials'])
+
             if 'succeeded' in job.obj['status']:
                 succeeded = job.obj['status']['succeeded']
             if 'active' in job.obj['status']:
@@ -230,16 +238,16 @@ def check_watched_item(self, job_state):
                 job_state.running = False
                 self.mark_as_finished(job_state)
                 return None
-
-            elif active > 0 or succeeded + active + failed == 0:
+            elif active > 0 and failed <= max_pod_retrials:
                 job_state.running = True
                 return job_state
-            elif failed > job_state.job_destination.params['max_pod_retrials']:
+            elif failed > max_pod_retrials:
                 self.__produce_log_file(job_state)
                 error_file = open(job_state.error_file, 'w')
                 error_file.write("Exceeded max number of Kubernetes pod retrials allowed for job\n")
                 error_file.close()
                 job_state.running = False
+                job_state.fail_message = "More pods failed than allowed."
                 self.mark_as_failed(job_state)
                 job.scale(replicas=0)
                 return None

From 89b5f3f47f14fa82065b83326332313ec8cca629 Mon Sep 17 00:00:00 2001
From: Pablo Moreno
Date: Sun, 26 Jun 2016 09:57:07 +0100
Subject: [PATCH 44/86] Uses exception instead of pykube.exceptions.HTTPError
 to manage missing pod's log exception.

---
 lib/galaxy/jobs/runners/kubernetes.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py
index 2bb058e8dc6c..3e221b150b14 100644
--- a/lib/galaxy/jobs/runners/kubernetes.py
+++ b/lib/galaxy/jobs/runners/kubernetes.py
@@ -4,8 +4,6 @@
 
 import logging
 
-from pykube.exceptions import HTTPError
-
 from galaxy import model
 from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
 from os import environ as os_environ
@@ -285,7 +283,8 @@ def __produce_log_file(self, job_state):
                 logs += pod.logs(timestamps=True)
                 logs += "\n\n==== Pod " + pod.name + " log end ===="
             except Exception as detail:
-                log.info("Could not write pods "+job_state.job_id+" log file due to HTTPError "+str(detail))
+                log.info("Could not write pod\'s " + pod_obj['metadata']['name'] +
+                         " log file due to HTTPError "+str(detail))
 
         logs_file_path = job_state.output_file
         logs_file = open(logs_file_path, mode="w")

From 9db3453e6559cb883af22793b741a84f3abd071c Mon Sep 17 00:00:00 2001
From: Pablo Moreno
Date: Sun, 26 Jun 2016 09:59:18 +0100
Subject: [PATCH 45/86] Overrides parent fail_job to save stdout/stderr of
 pods when Kubernetes job fails (otherwise lost).
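
The override keeps the pod logs by re-reading the log file that
__produce_log_file() already wrote to job_state.output_file and handing its
contents to job_wrapper.fail() as stdout. In outline (a simplified sketch of
the override added below, not the verbatim code):

    def fail_job(self, job_state):
        # Pod logs were already written to job_state.output_file; rescue
        # them as the job's stdout before the failure path deletes them.
        with open(job_state.output_file, 'r') as outfile:
            stdout_content = outfile.read()
        job_state.job_wrapper.fail(
            message=getattr(job_state, 'fail_message', 'Job failed'),
            stdout=stdout_content, stderr="See stdout for pod's stderr.")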
---
 lib/galaxy/jobs/runners/kubernetes.py | 29 ++++++++++++++++++++++++++-
 1 file changed, 28 insertions(+), 1 deletion(-)

diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py
index 3e221b150b14..deb365c9efcd 100644
--- a/lib/galaxy/jobs/runners/kubernetes.py
+++ b/lib/galaxy/jobs/runners/kubernetes.py
@@ -245,7 +245,7 @@ def check_watched_item(self, job_state):
                 error_file.write("Exceeded max number of Kubernetes pod retrials allowed for job\n")
                 error_file.close()
                 job_state.running = False
-                job_state.fail_message = "More pods failed than allowed."
+                job_state.fail_message = "More pods failed than allowed. See stdout for pods details."
                 self.mark_as_failed(job_state)
                 job.scale(replicas=0)
                 return None
@@ -273,6 +273,33 @@ def check_watched_item(self, job_state):
             self.mark_as_failed(job_state)
             return job_state
 
+    def fail_job(self, job_state):
+        """
+        Kubernetes runner overrides fail_job (called by mark_as_failed) to rescue the pod's log files, which are left as
+        stdout (pod logs are the natural stdout and stderr of the processes running inside the pods) and are
+        deleted in the parent implementation as part of failing the job.
+
+        :param job_state:
+        :return:
+        """
+
+        # First we rescue the pods logs
+        with open(job_state.output_file, 'r') as outfile:
+            stdout_content = outfile.read()
+
+        if getattr(job_state, 'stop_job', True):
+            self.stop_job(self.sa_session.query(self.app.model.Job).get(job_state.job_wrapper.job_id))
+        self._handle_runner_state('failure', job_state)
+        # Not convinced this is the best way to indicate this state, but
+        # something necessary
+        if not job_state.runner_state_handled:
+            job_state.job_wrapper.fail(
+                message=getattr(job_state, 'fail_message', 'Job failed'),
+                stdout=stdout_content, stderr='See stdout for pod\'s stderr.'
+            )
+        if job_state.job_wrapper.cleanup_job == "always":
+            job_state.cleanup()
+
     def __produce_log_file(self, job_state):
         pod_r = Pod.objects(self._pykube_api).filter(selector="app=" + job_state.job_id)
         logs = ""

From 693e18cdc24f8b13bb0d80fdf985e0fa3ddbf245 Mon Sep 17 00:00:00 2001
From: Nicola Soranzo
Date: Sun, 26 Jun 2016 10:16:51 -0400
Subject: [PATCH 46/86] Fix the fix

---
 lib/galaxy/datatypes/converters/interval_to_coverage.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/galaxy/datatypes/converters/interval_to_coverage.py b/lib/galaxy/datatypes/converters/interval_to_coverage.py
index 93042b62bb32..47882f7be6a4 100644
--- a/lib/galaxy/datatypes/converters/interval_to_coverage.py
+++ b/lib/galaxy/datatypes/converters/interval_to_coverage.py
@@ -133,7 +133,7 @@ def close(self):
     temp_file = tempfile.NamedTemporaryFile(mode="r")
     environ['LC_ALL'] = 'POSIX'
     commandline = "sort -f -n -k %d -k %d -k %d -o %s %s" % (chr_col_1 + 1, start_col_1 + 1, end_col_1 + 1, temp_file.name, in_fname)
-    errorcode, stdout = subprocess.check_call(commandline, shell=True)
+    subprocess.check_call(commandline, shell=True)
 
     coverage = CoverageWriter( out_stream=open(out_fname, "a"),
                                chromCol=chr_col_2, positionCol=position_col_2,

From 3d688e6bcf49e15fc12f45590250d34cccd0d8cb Mon Sep 17 00:00:00 2001
From: Nate Coraor
Date: Sun, 26 Jun 2016 10:17:53 -0400
Subject: [PATCH 47/86] Allow users to purge datasets by default

---
 config/galaxy.ini.sample | 2 +-
 lib/galaxy/config.py     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/config/galaxy.ini.sample b/config/galaxy.ini.sample
index 008e093a8706..1a3b0195f9eb 100644
--- a/config/galaxy.ini.sample
+++ b/config/galaxy.ini.sample
@@ -922,7 +922,7 @@ use_interactive = True
 # Allow users to remove their datasets from disk immediately (otherwise,
 # datasets will be removed after a time period specified by an administrator in
 # the cleanup scripts run via cron)
-#allow_user_dataset_purge = False
+#allow_user_dataset_purge = True
 
 # By default, users' data will be public, but setting this to True will cause
 # it to be private.  Does not affect existing users and data, only ones created
diff --git a/lib/galaxy/config.py b/lib/galaxy/config.py
index 3924d46ff6c9..de1523c31a27 100644
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -162,7 +162,7 @@ def __init__( self, **kwargs ):
         self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
         self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
         self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
-        self.allow_user_dataset_purge = string_as_bool( kwargs.get( "allow_user_dataset_purge", "False" ) )
+        self.allow_user_dataset_purge = string_as_bool( kwargs.get( "allow_user_dataset_purge", "True" ) )
         self.allow_user_impersonation = string_as_bool( kwargs.get( "allow_user_impersonation", "False" ) )
         self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
         self.collect_outputs_from = [ x.strip() for x in kwargs.get( 'collect_outputs_from', 'new_file_path,job_working_directory' ).lower().split(',') ]

From a8ac700318a48b5ebc2787eb81c13e9e6927db8d Mon Sep 17 00:00:00 2001
From: Nicola Soranzo
Date: Sun, 26 Jun 2016 11:14:00 -0400
Subject: [PATCH 48/86] Python3: lib/galaxy/model/__init__.py

---
 .ci/py3_sources.txt             |  1 +
 lib/galaxy/model/__init__.py    | 45 +++++++++++++++------------------
 lib/galaxy/security/__init__.py |  2 +-
 3 files changed, 23 insertions(+), 25 deletions(-)

diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt
index 13c3bc80e92a..5e171f7876dc 100644
--- a/.ci/py3_sources.txt
+++ b/.ci/py3_sources.txt
@@ -26,6 +26,7 @@ lib/galaxy/external_services/
 lib/galaxy/forms/
 lib/galaxy/jobs/
 lib/galaxy/managers/
+lib/galaxy/model/__init__.py
 lib/galaxy/objectstore/
 lib/galaxy/openid/
 lib/galaxy/quota/
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index b65cfd281b2c..5198f9ea6b46 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -14,35 +14,32 @@
 import socket
 import time
 from datetime import datetime, timedelta
-from itertools import ifilter, imap
 from string import Template
 from uuid import UUID, uuid4
 
-from six import string_types
-from sqlalchemy import and_, func, not_, or_, true, join, select
-from sqlalchemy.orm import joinedload, object_session, aliased
+from six import string_types
+from sqlalchemy import (and_, func, join, not_, or_, select, true, type_coerce,
+                        types)
 from sqlalchemy.ext import hybrid
-from sqlalchemy import types
-from sqlalchemy import type_coerce
+from sqlalchemy.orm import aliased, joinedload, object_session
 
-import galaxy.model.orm.now
 import galaxy.model.metadata
+import galaxy.model.orm.now
 import galaxy.security.passwords
 import galaxy.util
 from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.util.dictifiable import Dictifiable
 from galaxy.security import get_permitted_actions
-from galaxy.util import Params, restore_text, send_mail
-from galaxy.util import ready_name_for_url, unique_id
-from galaxy.util import unicodify, directory_hash_id
-from galaxy.util.multi_byte import is_multi_byte
-from galaxy.util.hash_util import new_secure_hash
+from galaxy.util import (directory_hash_id, Params, ready_name_for_url,
+                         restore_text, send_mail, unicodify, unique_id)
 from galaxy.util.bunch import Bunch
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.util.hash_util import new_secure_hash
+from galaxy.util.multi_byte import is_multi_byte
 from galaxy.util.sanitize_html import sanitize_html
-from galaxy.web.framework.helpers import to_unicode
 from galaxy.web.form_builder import (AddressField, CheckboxField, HistoryField,
                                      PasswordField, SelectField, TextArea, TextField,
                                      WorkflowField, WorkflowMappingField)
+from galaxy.web.framework.helpers import to_unicode
 
 log = logging.getLogger( __name__ )
 
@@ -1184,7 +1181,7 @@ def add_datasets( self, sa_session, datasets, parent_id=None, genome_build=None,
         """ Optimized version of add_dataset above that minimizes database
         interactions when adding many datasets to history at once.
         """
-        all_hdas = all( imap( is_hda, datasets ) )
+        all_hdas = all( is_hda(_) for _ in datasets )
         optimize = len( datasets) > 1 and parent_id is None and all_hdas and set_hid
         if optimize:
             self.__add_datasets_optimized( datasets, genome_build=genome_build )
@@ -1442,7 +1439,7 @@ def __filter_contents( self, content_class, **kwds ):
         if len(ids) < max_in_filter_length:
             query = query.filter( content_class.id.in_(ids) )
         else:
-            query = ifilter(lambda content: content.id in ids, query)
+            query = (content for content in query if content.id in ids)
         return query
 
     def __collection_contents_iter( self, **kwds ):
@@ -2064,7 +2061,7 @@ def get_converted_dataset(self, trans, target_ext):
                     raise NoConverterException("A dependency (%s) is missing a converter." % dependency)
             except KeyError:
                 pass  # No deps
-        new_dataset = self.datatype.convert_dataset( trans, self, target_ext, return_output=True, visible=False, deps=deps, set_output_history=True ).values()[0]
+        new_dataset = next(iter(self.datatype.convert_dataset( trans, self, target_ext, return_output=True, visible=False, deps=deps, set_output_history=True ).values()))
         assoc = ImplicitlyConvertedDatasetAssociation( parent=self, file_type=target_ext, dataset=new_dataset, metadata_safe=False )
         session = trans.sa_session
         session.add( new_dataset )
@@ -2213,7 +2210,7 @@ def get_datasources( self, trans ):
         """
         data_sources_dict = {}
         msg = None
-        for source_type, source_list in self.datatype.data_sources.iteritems():
+        for source_type, source_list in self.datatype.data_sources.items():
             data_source = None
             if source_type == "data_standalone":
                 # Nothing to do.
@@ -2566,9 +2563,9 @@ def sort_by_attr( seq, attr ):
             # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
             # only to provide stable sorting, but mainly to eliminate comparison of objects
             # (which can be expensive or prohibited) in case of equal attribute values.
-            intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
+            intermed = map( None, (getattr(_, attr) for _ in seq), range( len( seq ) ), seq )
             intermed.sort()
-            return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+            return [_[-1] for _ in intermed]
         if folders is None:
             active_folders = [ folder ]
         for active_folder in folder.active_folders:
@@ -3232,7 +3229,7 @@ def set_from_dict( self, new_data ):
             changed = self.collection.set_from_dict( new_data )
 
         # unknown keys are ignored here
-        for key in [ k for k in new_data.keys() if k in self.editable_keys ]:
+        for key in ( k for k in new_data.keys() if k in self.editable_keys ):
             new_val = new_data[ key ]
             old_val = self.__getattribute__( key )
             if new_val == old_val:
@@ -3696,7 +3693,7 @@ def unique_workflow_outputs(self):
                 outputs[output_name] = workflow_output
             else:
                 outputs[output_name] = workflow_output
-        return outputs.values()
+        return list(outputs.values())
 
     @property
     def content_id( self ):
@@ -3954,7 +3951,7 @@ def poll_active_workflow_ids(
         ).filter( and_( *and_conditions ) )
         # Immediately just load all ids into memory so time slicing logic
         # is relatively intuitive.
-        return map( lambda wi: wi.id, query.all() )
+        return [wi.id for wi in query.all()]
 
     def to_dict( self, view='collection', value_mapper=None, step_details=False ):
         rval = super( WorkflowInvocation, self ).to_dict( view=view, value_mapper=value_mapper )
@@ -5163,4 +5160,4 @@ def copy_list(lst, *args, **kwds):
     if lst is None:
         return lst
     else:
-        return list(map(lambda el: el.copy(*args, **kwds), lst))
+        return [el.copy(*args, **kwds) for el in lst]
diff --git a/lib/galaxy/security/__init__.py b/lib/galaxy/security/__init__.py
index 94d105371f3f..f1122e964fda 100644
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -179,7 +179,7 @@ def sort_by_attr( self, seq, attr ):
         # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
         # only to provide stable sorting, but mainly to eliminate comparison of objects
         # (which can be expensive or prohibited) in case of equal attribute values.
-        intermed = map( None, [getattr(_, attr) for _ in seq], range( len( seq ) ), seq)
+        intermed = map( None, (getattr(_, attr) for _ in seq), range( len( seq ) ), seq )
         intermed.sort()
         return [_[-1] for _ in intermed]

From 1fd6efcb0c8664551ca61b00e239263776526fa1 Mon Sep 17 00:00:00 2001
From: Timur Shtatland
Date: Sun, 26 Jun 2016 11:28:07 -0400
Subject: [PATCH 49/86] Python3: the rest of lib/galaxy/datatypes/converters/

lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
---
 .ci/py3_sources.txt                                         | 5 +++++
 lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py | 2 +-
 lib/galaxy/datatypes/converters/pbed_to_lped_converter.py   | 2 +-
 .../converters/picard_interval_list_to_bed6_converter.py    | 4 ++--
 .../datatypes/converters/ref_to_seq_taxonomy_converter.py   | 4 ++--
 .../datatypes/converters/wiggle_to_simple_converter.py      | 2 +-
 6 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt
index 5e171f7876dc..c3de5bf7e1c9 100644
--- a/.ci/py3_sources.txt
+++ b/.ci/py3_sources.txt
@@ -15,6 +15,11 @@ lib/galaxy/datatypes/converters/lped_to_fped_converter.py
 lib/galaxy/datatypes/converters/lped_to_pbed_converter.py
 lib/galaxy/datatypes/converters/maf_to_fasta_converter.py
 lib/galaxy/datatypes/converters/maf_to_interval_converter.py
+lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
+lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
+lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
+lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
+lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
 lib/galaxy/datatypes/dataproviders/
 lib/galaxy/datatypes/sequence.py
 lib/galaxy/datatypes/sniff.py
diff --git a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
index 688315f44371..9d45df033923 100644
--- a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
+++ b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
@@ -104,7 +104,7 @@ def main():
         f.write(galhtmlprefix % prog)
         s1 = '## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow())  # becomes info
         s2 = 'Input %s, winsize=%s, winmove=%s, r2thresh=%s' % (base_name, winsize, winmove, r2thresh)
-        print '%s %s' % (s1, s2)
+        print('%s %s' % (s1, s2))
         f.write('<div>%s\n%s\n<ol>' % (s1, s2))
         for i, data in enumerate( flist ):
             f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
diff --git a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
index c45eaeda377d..7c2806f3d620 100644
--- a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
+++ b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
@@ -68,7 +68,7 @@ def main():
     with open(outhtmlname, 'w') as f:
         f.write(galhtmlprefix % prog)
         s = '## Rgenetics: http://bitbucket.org/rgalaxy Galaxy Tools %s %s' % (prog, timenow())  # becomes info
-        print s
+        print(s)
         f.write('<div>%s\n<ol>' % (s))
         for i, data in enumerate( flist ):
             f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
diff --git a/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
index ddd16482435d..0a1d9697c67e 100644
--- a/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
+++ b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
@@ -25,7 +25,7 @@ def __main__():
                 elems = line.split( '\t' )
                 out.write( '%s\t%s\t%s\t%s\t0\t%s\n' % ( elems[0], int(elems[1]) - 1, elems[2], elems[4], elems[3] ) )
             except Exception as e:
-                print e
+                print(e)
                 skipped_lines += 1
                 if not first_skipped_line:
                     first_skipped_line = i + 1
@@ -37,7 +37,7 @@ def __main__():
     info_msg = "%i lines converted to BED.  " % ( i + 1 - skipped_lines )
     if skipped_lines > 0:
         info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
-    print info_msg
+    print(info_msg)
 
 if __name__ == "__main__":
     __main__()
diff --git a/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py b/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
index 024731affad0..2f2cf67730a9 100644
--- a/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
+++ b/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 """
-convert a ref.taxonommy file to a seq.taxonomy file
+convert a ref.taxonomy file to a seq.taxonomy file
 Usage:
 %python ref_to_seq_taxonomy_converter.py
 """
@@ -14,7 +14,7 @@ def __main__():
     infile_name = sys.argv[1]
     outfile = open(sys.argv[2], 'w')
 
-    for i, line in enumerate(file(infile_name)):
+    for i, line in enumerate(open(infile_name)):
         line = line.rstrip()
         if line and not line.startswith('#'):
             fields = line.split('\t')
diff --git a/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
index b41c27b0f98f..92128149f027 100644
--- a/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
+++ b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
@@ -34,7 +34,7 @@ def main():
             out_file.write( "%s\n" % "\t".join( map( str, fields ) ) )
     except UCSCLimitException:
         # Wiggle data was truncated, at the very least need to warn the user.
-        print 'Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.'
+        print('Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.')
     except ValueError as e:
         in_file.close()
         out_file.close()

From 1b06c52e8d62c864e9a4f45a038cd3482db9f9d6 Mon Sep 17 00:00:00 2001
From: Timur Shtatland
Date: Sun, 26 Jun 2016 11:33:13 -0400
Subject: [PATCH 50/86] Python3: sort, remove dup in .ci/py3_sources.txt

---
 .ci/py3_sources.txt | 31 +++++++++++++++----------------
 1 file changed, 15 insertions(+), 16 deletions(-)

diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt
index c3de5bf7e1c9..976e71e8bd98 100644
--- a/.ci/py3_sources.txt
+++ b/.ci/py3_sources.txt
@@ -39,15 +39,14 @@ lib/galaxy/sample_tracking/
 lib/galaxy/security/
 lib/galaxy/tags/
 lib/galaxy/tools/cwl/
-lib/galaxy/tools/parser/
+lib/galaxy/tools/deps/
 lib/galaxy/tools/lint.py
 lib/galaxy/tools/lint_util.py
+lib/galaxy/tools/linters/
 lib/galaxy/tools/loader.py
 lib/galaxy/tools/loader_directory.py
-lib/galaxy/tools/linters/
-lib/galaxy/tools/deps/
-lib/galaxy/tools/toolbox/
 lib/galaxy/tools/parser/
+lib/galaxy/tools/toolbox/
 lib/galaxy/tours/
 lib/galaxy/util/
 lib/galaxy/work/
@@ -71,6 +70,8 @@ scripts/build_toolbox.py
 scripts/check_eggs.py
 scripts/check_galaxy.py
 scripts/check_python.py
+scripts/cleanup_datasets/admin_cleanup_datasets.py
+scripts/cleanup_datasets/cleanup_datasets.py
 scripts/cleanup_datasets/pgcleanup.py
 scripts/cleanup_datasets/populate_uuid.py
 scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py
@@ -80,16 +81,23 @@ scripts/cleanup_datasets/update_metadata.py
 scripts/data_libraries/build_whoosh_index.py
 scripts/db_shell.py
 scripts/drmaa_external_runner.py
-scripts/cleanup_datasets/admin_cleanup_datasets.py
-scripts/cleanup_datasets/cleanup_datasets.py
+test/api/helpers.py
+test/api/test_datasets.py
+test/api/test_tool_data.py
+test/api/test_workflow_extraction.py
+test/api/test_workflows.py
 test/api/test_workflows_from_yaml.py
 test/base/
 test/casperjs/
-test/unit/managers/test_DatasetManager.py
 test/functional/
 test/integration/
 test/manual/
+test/shed_functional/base/twilltestcase.py
+test/shed_functional/functional_tests.py
+test/unit/datatypes/dataproviders/test_base_dataproviders.py
+test/unit/datatypes/dataproviders/test_line_dataproviders.py
 test/unit/managers/base.py
+test/unit/managers/test_DatasetManager.py
 test/unit/test_galaxy_mapping.py
 test/unit/tools/test_actions.py
 test/unit/workflows/test_run_parameters.py
@@ -99,12 +107,3 @@ tools/evolution/
 tools/sr_mapping/
 tools/stats/aggregate_scores_in_intervals.py
 tools/visualization/
-test/api/helpers.py
-test/api/test_datasets.py
-test/api/test_tool_data.py
-test/api/test_workflow_extraction.py
-test/api/test_workflows.py
-test/shed_functional/base/twilltestcase.py
-test/shed_functional/functional_tests.py
-test/unit/datatypes/dataproviders/test_base_dataproviders.py
-test/unit/datatypes/dataproviders/test_line_dataproviders.py

From 2c67f1e7e59939d17fd91ca13c8d898ae887a8b7 Mon Sep 17 00:00:00 2001
From: Timur Shtatland
Date: Sun, 26 Jun 2016 11:51:30 -0400
Subject: [PATCH 51/86] Python3: lib/galaxy/model/migrate/versions/0006..0013

lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
lib/galaxy/model/migrate/versions/0007_sharing_histories.py
lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
lib/galaxy/model/migrate/versions/0009_request_table.py
lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
lib/galaxy/model/migrate/versions/0012_user_address.py
lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
---
 .ci/py3_sources.txt                           |  8 +++++
 .../versions/0006_change_qual_datatype.py     |  8 ++---
 .../versions/0007_sharing_histories.py        | 10 +++---
 .../migrate/versions/0008_galaxy_forms.py     | 22 ++++++-------
 .../migrate/versions/0009_request_table.py    | 10 +++---
 .../0010_hda_display_at_authz_table.py        | 18 +++++------
 .../versions/0011_v0010_mysql_index_fix.py    | 10 +++---
 .../migrate/versions/0012_user_address.py     | 12 +++----
 ...0013_change_lib_item_templates_to_forms.py | 32 +++++++++----------
 9 files changed, 69 insertions(+), 61 deletions(-)

diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt
index 976e71e8bd98..21759162474c 100644
--- a/.ci/py3_sources.txt
+++ b/.ci/py3_sources.txt
@@ -32,6 +32,14 @@ lib/galaxy/forms/
 lib/galaxy/jobs/
 lib/galaxy/managers/
 lib/galaxy/model/__init__.py
+lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
+lib/galaxy/model/migrate/versions/0007_sharing_histories.py
+lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
+lib/galaxy/model/migrate/versions/0009_request_table.py
+lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
+lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
+lib/galaxy/model/migrate/versions/0012_user_address.py
+lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
 lib/galaxy/objectstore/
 lib/galaxy/openid/
 lib/galaxy/quota/
diff --git a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
index de3733670f78..4d0cf1104e16 100644
--- a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
+++ b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
@@ -20,10 +20,10 @@ def display_migration_details():
-    print "========================================"
-    print "This migration script changes certain values in the history_dataset_association.extension"
-    print "column, specifically 'qual' is chaged to be 'qual454'."
-    print "========================================"
+    print("========================================")
+    print("This migration script changes certain values in the history_dataset_association.extension")
+    print("column, specifically 'qual' is chaged to be 'qual454'.")
+    print("========================================")
 
 
 def upgrade(migrate_engine):
diff --git a/lib/galaxy/model/migrate/versions/0007_sharing_histories.py b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py
index 3f959c7463c6..599717e60a1f 100644
--- a/lib/galaxy/model/migrate/versions/0007_sharing_histories.py
+++ b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py
@@ -21,11 +21,11 @@ def display_migration_details():
-    print "========================================"
-    print "This migration script creates the new history_user_share_association table, and adds"
-    print "a new boolean type column to the history table. This provides support for sharing"
-    print "histories in the same way that workflows are shared."
-    print "========================================"
+    print("========================================")
+    print("This migration script creates the new history_user_share_association table, and adds")
+    print("a new boolean type column to the history table. This provides support for sharing")
+    print("histories in the same way that workflows are shared.")
+    print("========================================")
 
 HistoryUserShareAssociation_table = Table( "history_user_share_association", metadata,
                                            Column( "id", Integer, primary_key=True ),
diff --git a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
index b3adfd8370d9..9c0912e1fae0 100644
--- a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
+++ b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
@@ -31,17 +31,17 @@ def display_migration_details():
-    print "========================================"
-    print "This migration script adds the following new tables for supporting Galaxy forms:"
-    print "1) form_definition_current"
-    print "2) form_definition"
-    print "3) form_values"
-    print "4) request_type"
-    print "5) request"
-    print "6) sample"
-    print "7) sample_state"
-    print "8) sample_event"
-    print "========================================"
+    print("========================================")
+    print("This migration script adds the following new tables for supporting Galaxy forms:")
+    print("1) form_definition_current")
+    print("2) form_definition")
+    print("3) form_values")
+    print("4) request_type")
+    print("5) request")
+    print("6) sample")
+    print("7) sample_state")
+    print("8) sample_event")
+    print("========================================")
 
 FormDefinitionCurrent_table = Table('form_definition_current', metadata,
                                     Column( "id", Integer, primary_key=True),
diff --git a/lib/galaxy/model/migrate/versions/0009_request_table.py b/lib/galaxy/model/migrate/versions/0009_request_table.py
index 72476ec5bf7e..da2b21d71c62 100644
--- a/lib/galaxy/model/migrate/versions/0009_request_table.py
+++ b/lib/galaxy/model/migrate/versions/0009_request_table.py
@@ -22,11 +22,11 @@ def display_migration_details():
-    print "========================================"
-    print "This migration script adds a new column to 2 tables:"
-    print "1) a new boolean type column named 'submitted' to the 'request' table"
-    print "2) a new string type column named 'bar_code' to the 'sample' table"
-    print "========================================"
+    print("========================================")
+    print("This migration script adds a new column to 2 tables:")
+    print("1) a new boolean type column named 'submitted' to the 'request' table")
+    print("2) a new string type column named 'bar_code' to the 'sample' table")
+    print("========================================")
 
 
 def upgrade(migrate_engine):
diff --git a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
index 059013e550c2..ca86235123d0 100644
--- a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
+++ b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
@@ -29,15 +29,15 @@ def display_migration_details():
-    print "========================================"
-    print "This migration script adds the history_dataset_association_display_at_authorization table, which"
-    print "allows 'private' datasets to be displayed at external sites without making them public."
- print "" - print "If using mysql, this script will display the following error, which is corrected in the next migration" - print "script: history_dataset_association_display_at_authorization table failed: (OperationalError)" - print "(1059, 'Identifier name 'ix_history_dataset_association_display_at_authorization_update_time'" - print "is too long." - print "========================================" + print("========================================") + print("This migration script adds the history_dataset_association_display_at_authorization table, which") + print("allows 'private' datasets to be displayed at external sites without making them public.") + print("") + print("If using mysql, this script will display the following error, which is corrected in the next migration") + print("script: history_dataset_association_display_at_authorization table failed: (OperationalError)") + print("(1059, 'Identifier name 'ix_history_dataset_association_display_at_authorization_update_time'") + print("is too long.") + print("========================================") HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata, Column( "id", Integer, primary_key=True ), diff --git a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py index 8e69bd7d1447..5fcf1e6dff2d 100644 --- a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py +++ b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py @@ -24,11 +24,11 @@ def display_migration_details(): - print "========================================" - print "This script fixes a problem introduced in the previous migration script ( 9->10 ). MySQL" - print "has a name length limit and thus the index 'ix_hdadaa_history_dataset_association_id' has" - print "to be manually created." - print "========================================" + print("========================================") + print("This script fixes a problem introduced in the previous migration script ( 9->10 ). MySQL") + print("has a name length limit and thus the index 'ix_hdadaa_history_dataset_association_id' has") + print("to be manually created.") + print("========================================") HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata, Column( "id", Integer, primary_key=True ), diff --git a/lib/galaxy/model/migrate/versions/0012_user_address.py b/lib/galaxy/model/migrate/versions/0012_user_address.py index 89beaf65e03d..de22c8b162b2 100644 --- a/lib/galaxy/model/migrate/versions/0012_user_address.py +++ b/lib/galaxy/model/migrate/versions/0012_user_address.py @@ -26,12 +26,12 @@ def display_migration_details(): - print "========================================" - print "This script adds a new user_address table that is currently only used with sample requests, where" - print "a user can select from a list of his addresses to associate with the request. This script also" - print "drops the request.submitted column which was boolean and replaces it with a request.state column" - print "which is a string, allowing for more flexibility with request states." 
- print "========================================" + print("========================================") + print("This script adds a new user_address table that is currently only used with sample requests, where") + print("a user can select from a list of his addresses to associate with the request. This script also") + print("drops the request.submitted column which was boolean and replaces it with a request.state column") + print("which is a string, allowing for more flexibility with request states.") + print("========================================") UserAddress_table = Table( "user_address", metadata, Column( "id", Integer, primary_key=True), diff --git a/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py index a7884890d24b..f781ddde61c6 100644 --- a/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py +++ b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py @@ -32,22 +32,22 @@ def display_migration_details(): - print "========================================" - print "This migration script eliminates all of the tables that were used for the 1st version of the" - print "library templates where template fields and contents were each stored as a separate table row" - print "in various library item tables. All of these tables are dropped in this script, eliminating all" - print "existing template data. A total of 14 existing tables are dropped." - print "" - print "We're now basing library templates on Galaxy forms, so field contents are stored as a jsonified" - print "list in the form_values table. This script introduces the following 3 new association tables:" - print "1) library_info_association" - print "2) library_folder_info_association" - print "3) library_dataset_dataset_info_association" - print "" - print "If using mysql, this script will throw an (OperationalError) exception due to a long index name" - print "on the library_dataset_dataset_info_association table, which is OK because the script creates" - print "an index with a shortened name." - print "========================================" + print("========================================") + print("This migration script eliminates all of the tables that were used for the 1st version of the") + print("library templates where template fields and contents were each stored as a separate table row") + print("in various library item tables. All of these tables are dropped in this script, eliminating all") + print("existing template data. A total of 14 existing tables are dropped.") + print("") + print("We're now basing library templates on Galaxy forms, so field contents are stored as a jsonified") + print("list in the form_values table. 
This script introduces the following 3 new association tables:") + print("1) library_info_association") + print("2) library_folder_info_association") + print("3) library_dataset_dataset_info_association") + print("") + print("If using mysql, this script will throw an (OperationalError) exception due to a long index name") + print("on the library_dataset_dataset_info_association table, which is OK because the script creates") + print("an index with a shortened name.") + print("========================================") LibraryInfoAssociation_table = Table( 'library_info_association', metadata, Column( "id", Integer, primary_key=True), From de99c7efe2d6c540d6388206ae7673a54204d85d Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 26 Jun 2016 12:02:50 -0400 Subject: [PATCH 52/86] Python3: lib/galaxy/visualization/ --- .ci/py3_sources.txt | 1 + .../data_providers/phyloviz/newickparser.py | 10 +++++----- lib/galaxy/visualization/genomes.py | 10 +++++----- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 21759162474c..084d0f539a42 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -57,6 +57,7 @@ lib/galaxy/tools/parser/ lib/galaxy/tools/toolbox/ lib/galaxy/tours/ lib/galaxy/util/ +lib/galaxy/visualization/ lib/galaxy/work/ lib/galaxy_ext/ lib/galaxy_utils/ diff --git a/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py b/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py index 521cb63d83b9..fe17cfc53aa6 100644 --- a/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py +++ b/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py @@ -1,5 +1,5 @@ -from __future__ import with_statement -from baseparser import Base_Parser, PhyloTree + +from .baseparser import Base_Parser, PhyloTree import re @@ -81,7 +81,7 @@ def _mapName(self, newickString, nameMap): start = 0 end = 0 - for i in xrange(len(newickString)): + for i in range(len(newickString)): if newickString[i] == "(" or newickString[i] == ",": if re.match(r"[,(]", newickString[i + 1:]): continue @@ -89,7 +89,7 @@ def _mapName(self, newickString, nameMap): end = i + 1 # i now refers to the starting position of the term to be replaced, # we will next find j which is the ending pos of the term - for j in xrange(i + 1, len(newickString)): + for j in range(i + 1, len(newickString)): enclosingSymbol = newickString[j] # the immediate symbol after a common or left bracket which denotes the end of a term if enclosingSymbol == ")" or enclosingSymbol == ":" or enclosingSymbol == ",": termToReplace = newickString[end:j] @@ -124,7 +124,7 @@ def parseNode(self, string, depth): lenOfPreceedingInternalNodeString = 0 bracketStack = [] - for j in xrange(len(string)): + for j in range(len(string)): if string[j] == "(": # finding the positions of all the open brackets bracketStack.append(j) continue diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py index 6a43139b34fb..9c73573f5ae2 100644 --- a/lib/galaxy/visualization/genomes.py +++ b/lib/galaxy/visualization/genomes.py @@ -101,7 +101,7 @@ def split_by_number(s): if num: num = int( num ) else: - num = sys.maxint + num = sys.maxsize # just a big number if low: low = int( low ) @@ -163,14 +163,14 @@ def split_by_number(s): # Set flag to indicate whether there are more chroms after list. next_chroms = False try: - len_file_enumerate.next() + next(len_file_enumerate) next_chroms = True except: # No more chroms to read. 
            pass
 
-        to_sort = [{ 'chrom': chrm, 'len': length } for chrm, length in chroms.iteritems()]
-        to_sort.sort(lambda a, b: cmp( split_by_number(a['chrom']), split_by_number(b['chrom']) ))
+        to_sort = [{ 'chrom': chrm, 'len': length } for chrm, length in chroms.items()]
+        to_sort.sort(key=lambda _: split_by_number(_['chrom']))
 
         return {
             'id': self.key,
             'reference': self.twobit_file is not None,
@@ -232,7 +232,7 @@ def reload_genomes( self ):
 
     def check_and_reload( self ):
         # Check if tables have been modified, if so reload
-        for table_name, table_version in self._table_versions.iteritems():
+        for table_name, table_version in self._table_versions.items():
             table = self.app.tool_data_tables.get( table_name, None )
             if table is not None and not table.is_current_version( table_version ):
                 return self.reload_genomes()

From 005429acbb2c9e09b4b653dc93b4eff2ee883a77 Mon Sep 17 00:00:00 2001
From: Timur Shtatland
Date: Sun, 26 Jun 2016 12:07:49 -0400
Subject: [PATCH 53/86] Python3: lib/galaxy/model/migrate/versions/0014..0023

lib/galaxy/model/migrate/versions/0014_pages.py
lib/galaxy/model/migrate/versions/0015_tagging.py
lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
lib/galaxy/model/migrate/versions/0019_request_library_folder.py
lib/galaxy/model/migrate/versions/0020_library_upload_job.py
lib/galaxy/model/migrate/versions/0021_user_prefs.py
lib/galaxy/model/migrate/versions/0022_visualization_tables.py
lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py
---
 .ci/py3_sources.txt                           | 10 ++++++
 .../model/migrate/versions/0014_pages.py      |  2 +-
 .../model/migrate/versions/0015_tagging.py    | 36 +++++++++----------
 .../versions/0016_v0015_mysql_index_fix.py    | 12 +++----
 .../versions/0017_library_item_indexes.py     |  8 ++---
 .../0018_ordered_tags_and_page_tags.py        | 18 +++++-----
 .../versions/0019_request_library_folder.py   |  8 ++---
 .../versions/0020_library_upload_job.py       | 18 +++++-----
 .../model/migrate/versions/0021_user_prefs.py | 10 +++---
 .../versions/0022_visualization_tables.py     |  2 +-
 ...0023_page_published_and_deleted_columns.py |  2 +-
 11 files changed, 68 insertions(+), 58 deletions(-)

diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt
index 084d0f539a42..25feb081b910 100644
--- a/.ci/py3_sources.txt
+++ b/.ci/py3_sources.txt
@@ -40,6 +40,16 @@ lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
 lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
 lib/galaxy/model/migrate/versions/0012_user_address.py
 lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
+lib/galaxy/model/migrate/versions/0014_pages.py
+lib/galaxy/model/migrate/versions/0015_tagging.py
+lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
+lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
+lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
+lib/galaxy/model/migrate/versions/0019_request_library_folder.py
+lib/galaxy/model/migrate/versions/0020_library_upload_job.py
+lib/galaxy/model/migrate/versions/0021_user_prefs.py
+lib/galaxy/model/migrate/versions/0022_visualization_tables.py
+lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py
 lib/galaxy/objectstore/
 lib/galaxy/openid/
 lib/galaxy/quota/
diff --git a/lib/galaxy/model/migrate/versions/0014_pages.py b/lib/galaxy/model/migrate/versions/0014_pages.py
index 971627512128..3bdbfc71fa9c 100644
--- a/lib/galaxy/model/migrate/versions/0014_pages.py
+++ b/lib/galaxy/model/migrate/versions/0014_pages.py
@@ -33,7 +33,7 @@ def upgrade(migrate_engine):
     metadata.bind = migrate_engine
-    print __doc__
+    print(__doc__)
     metadata.reflect()
     try:
         if migrate_engine.name == 'mysql':
diff --git a/lib/galaxy/model/migrate/versions/0015_tagging.py b/lib/galaxy/model/migrate/versions/0015_tagging.py
index 13f1ee4344ad..9fc4c83ed3c5 100644
--- a/lib/galaxy/model/migrate/versions/0015_tagging.py
+++ b/lib/galaxy/model/migrate/versions/0015_tagging.py
@@ -24,16 +24,16 @@ def display_migration_details():
-    print ""
-    print "This migration script adds the tables necessary to support tagging of histories,"
-    print "datasets, and history-dataset associations (user views of datasets)."
-    print ""
-    print "If using mysql, this script will display the following error, which is "
-    print "corrected in the next migration script:"
-    print "history_dataset_association_tag_association table failed: "
-    print "(OperationalError) (1059, 'Identifier name "
-    print "'ix_history_dataset_association_tag_association_history_dataset_association_id'"
-    print "is too long)"
+    print("")
+    print("This migration script adds the tables necessary to support tagging of histories,")
+    print("datasets, and history-dataset associations (user views of datasets).")
+    print("")
+    print("If using mysql, this script will display the following error, which is ")
+    print("corrected in the next migration script:")
+    print("history_dataset_association_tag_association table failed: ")
+    print("(OperationalError) (1059, 'Identifier name ")
+    print("'ix_history_dataset_association_tag_association_history_dataset_association_id'")
+    print("is too long)")
 
 # New tables to support tagging of histories, datasets, and history-dataset associations.
@@ -73,22 +73,22 @@ def upgrade(migrate_engine):
     try:
         Tag_table.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Creating tag table failed: %s" % str( e ) )
     try:
         HistoryTagAssociation_table.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Creating history_tag_association table failed: %s" % str( e ) )
     try:
         DatasetTagAssociation_table.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Creating dataset_tag_association table failed: %s" % str( e ) )
     try:
         HistoryDatasetAssociationTagAssociation_table.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Creating history_dataset_association_tag_association table failed: %s" % str( e ) )
@@ -98,20 +98,20 @@ def downgrade(migrate_engine):
     try:
         Tag_table.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping tag table failed: %s" % str( e ) )
     try:
         HistoryTagAssociation_table.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping history_tag_association table failed: %s" % str( e ) )
     try:
         DatasetTagAssociation_table.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping dataset_tag_association table failed: %s" % str( e ) )
     try:
         HistoryDatasetAssociationTagAssociation_table.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping history_dataset_association_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py b/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
index d8b415000e6c..4890efb57b94 100644
--- a/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
+++ b/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
@@ -17,10 +17,10 @@ def display_migration_details():
-    print ""
-    print "This script fixes a problem introduced in 0015_tagging.py. MySQL has a"
-    print "name length limit and thus the index 'ix_hda_ta_history_dataset_association_id'"
-    print "has to be manually created."
+    print("")
+    print("This script fixes a problem introduced in 0015_tagging.py. MySQL has a")
+    print("name length limit and thus the index 'ix_hda_ta_history_dataset_association_id'")
+    print("has to be manually created.")
 
 HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
                                                        Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
@@ -38,7 +38,7 @@ def upgrade(migrate_engine):
     try:
         i.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Adding index 'ix_hdata_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
@@ -49,5 +49,5 @@ def downgrade(migrate_engine):
     try:
         i.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Removing index 'ix_hdata_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
index e79c88979401..84c01ea658b7 100644
--- a/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
+++ b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
@@ -18,10 +18,10 @@ def display_migration_details():
-    print "========================================"
-    print "This script adds 3 indexes to table columns: library_folder.name,"
-    print "library_dataset.name, library_dataset_dataset_association.name."
-    print "========================================"
+    print("========================================")
+    print("This script adds 3 indexes to table columns: library_folder.name,")
+    print("library_dataset.name, library_dataset_dataset_association.name.")
+    print("========================================")
 
 
 def upgrade(migrate_engine):
diff --git a/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
index bf2c144f22ee..8027a806169c 100644
--- a/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
+++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
@@ -17,9 +17,9 @@ def display_migration_details():
-    print ""
-    print "This migration script provides support for (a) ordering tags by recency and"
-    print "(b) tagging pages. This script deletes all existing tags."
+    print("")
+    print("This migration script provides support for (a) ordering tags by recency and")
+    print("(b) tagging pages. This script deletes all existing tags.")
 
 HistoryTagAssociation_table = Table( "history_tag_association", metadata,
                                      Column( "id", Integer, primary_key=True ),
@@ -66,14 +66,14 @@ def upgrade(migrate_engine):
         HistoryTagAssociation_table.drop()
         HistoryTagAssociation_table.create()
     except Exception as e:
-        print "Recreating history_tag_association table failed: %s" % str( e )
+        print("Recreating history_tag_association table failed: %s" % str( e ))
         log.debug( "Recreating history_tag_association table failed: %s" % str( e ) )
 
     try:
         DatasetTagAssociation_table.drop()
         DatasetTagAssociation_table.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Recreating dataset_tag_association table failed: %s" % str( e ) )
 
     try:
@@ -88,17 +88,17 @@ def upgrade(migrate_engine):
         try:
             i.create()
         except Exception as e:
-            print str(e)
+            print(str(e))
             log.debug( "Adding index 'ix_hda_ta_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Recreating history_dataset_association_tag_association table failed: %s" % str( e ) )
 
     # Create page_tag_association table.
     try:
         PageTagAssociation_table.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Creating page_tag_association table failed: %s" % str( e ) )
@@ -112,5 +112,5 @@ def downgrade(migrate_engine):
     try:
         PageTagAssociation_table.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping page_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0019_request_library_folder.py b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py
index 3092cb12a60f..651662292114 100644
--- a/lib/galaxy/model/migrate/versions/0019_request_library_folder.py
+++ b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py
@@ -21,11 +21,11 @@ def display_migration_details():
-    print "========================================"
-    print """This script creates a request.folder_id column which is a foreign
+    print("========================================")
+    print("""This script creates a request.folder_id column which is a foreign
 key to the library_folder table. This also adds a 'type' and 'layout' column
-to the form_definition table."""
-    print "========================================"
+to the form_definition table.""")
+    print("========================================")
 
 
 def upgrade(migrate_engine):
diff --git a/lib/galaxy/model/migrate/versions/0020_library_upload_job.py b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py
index fb3f6af0c363..b6be694e26f5 100644
--- a/lib/galaxy/model/migrate/versions/0020_library_upload_job.py
+++ b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py
@@ -19,14 +19,14 @@ def display_migration_details():
-    print ""
-    print "========================================"
-    print """This script creates a job_to_output_library_dataset table for allowing library
+    print("")
+    print("========================================")
+    print("""This script creates a job_to_output_library_dataset table for allowing library
 uploads to run as regular jobs. To support this, a library_folder_id column is
 added to the job table, and library_folder/output_library_datasets relations
 are added to the Job object. An index is also added to the dataset.state
-column."""
-    print "========================================"
+column.""")
+    print("========================================")
 
 JobToOutputLibraryDatasetAssociation_table = Table( "job_to_output_library_dataset", metadata,
                                                     Column( "id", Integer, primary_key=True ),
@@ -44,7 +44,7 @@ def upgrade(migrate_engine):
     try:
         JobToOutputLibraryDatasetAssociation_table.create()
     except Exception as e:
-        print "Creating job_to_output_library_dataset table failed: %s" % str( e )
+        print("Creating job_to_output_library_dataset table failed: %s" % str( e ))
         log.debug( "Creating job_to_output_library_dataset table failed: %s" % str( e ) )
     # Create the library_folder_id column
     try:
@@ -86,7 +86,7 @@ def upgrade(migrate_engine):
     try:
         i.create()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Adding index 'ix_dataset_state' to dataset table failed: %s" % str( e ) )
@@ -109,7 +109,7 @@ def downgrade(migrate_engine):
     try:
         JobToOutputLibraryDatasetAssociation_table.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping job_to_output_library_dataset table failed: %s" % str( e ) )
     # Drop the ix_dataset_state index
     try:
@@ -121,5 +121,5 @@ def downgrade(migrate_engine):
     try:
         i.drop()
     except Exception as e:
-        print str(e)
+        print(str(e))
         log.debug( "Dropping index 'ix_dataset_state' from dataset table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0021_user_prefs.py b/lib/galaxy/model/migrate/versions/0021_user_prefs.py
index e7647e0d398c..b117daf3cafe 100644
--- a/lib/galaxy/model/migrate/versions/0021_user_prefs.py
+++ b/lib/galaxy/model/migrate/versions/0021_user_prefs.py
@@ -12,9 +12,9 @@ def display_migration_details():
-    print ""
-    print "This migration script adds a user preferences table to Galaxy."
-    print ""
+    print("")
+    print("This migration script adds a user preferences table to Galaxy.")
+    print("")
 
 # New table to support user preferences.
@@ -33,7 +33,7 @@ def upgrade(migrate_engine): try: UserPreference_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating user_preference table failed: %s" % str( e ) ) @@ -43,5 +43,5 @@ def downgrade(migrate_engine): try: UserPreference_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping user_preference table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0022_visualization_tables.py b/lib/galaxy/model/migrate/versions/0022_visualization_tables.py index 47191e43dad6..23172a8def9b 100644 --- a/lib/galaxy/model/migrate/versions/0022_visualization_tables.py +++ b/lib/galaxy/model/migrate/versions/0022_visualization_tables.py @@ -32,7 +32,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Visualization_table.create() diff --git a/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py b/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py index f5edaa28cb57..b2b5c5e83a46 100644 --- a/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py +++ b/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Page_table = Table( "page", metadata, autoload=True ) From 9ad9c0c80a1521abec0404e319ef41d89d5a9c50 Mon Sep 17 00:00:00 2001 From: Eric Enns Date: Sun, 26 Jun 2016 14:08:06 -0500 Subject: [PATCH 54/86] Python 3: remaining model migrations --- .ci/py3_sources.txt | 106 ++++++++++++++++++ .../0024_page_slug_unique_constraint.py | 2 +- .../model/migrate/versions/0025_user_info.py | 6 +- .../migrate/versions/0026_cloud_tables.py | 6 +- .../migrate/versions/0027_request_events.py | 8 +- .../0028_external_metadata_file_override.py | 10 +- .../migrate/versions/0029_user_actions.py | 10 +- .../versions/0030_history_slug_column.py | 2 +- .../0031_community_and_workflow_tags.py | 28 ++--- .../0032_stored_workflow_slug_column.py | 2 +- ...lished_cols_for_histories_and_workflows.py | 14 +-- .../0034_page_user_share_association.py | 6 +- ...item_annotations_and_workflow_step_tags.py | 24 ++-- ...column_to_library_template_assoc_tables.py | 2 +- .../migrate/versions/0037_samples_library.py | 2 +- ...column_to_library_template_assoc_tables.py | 2 +- ...39_add_synopsis_column_to_library_table.py | 2 +- .../migrate/versions/0040_page_annotations.py | 6 +- .../versions/0041_workflow_invocation.py | 2 +- .../versions/0042_workflow_invocation_fix.py | 2 +- ...isualization_sharing_tagging_annotating.py | 34 +++--- ...0044_add_notify_column_to_request_table.py | 2 +- .../0045_request_type_permissions_table.py | 2 +- .../migrate/versions/0046_post_job_actions.py | 2 +- .../versions/0047_job_table_user_id_column.py | 12 +- .../0048_dataset_instance_state_column.py | 2 +- .../migrate/versions/0049_api_keys_table.py | 2 +- .../versions/0050_drop_cloud_tables.py | 6 +- .../0051_imported_col_for_jobs_table.py | 6 +- .../versions/0052_sample_dataset_table.py | 2 +- .../migrate/versions/0053_item_ratings.py | 24 ++-- .../versions/0054_visualization_dbkey.py | 2 +- .../versions/0055_add_pja_assoc_for_jobs.py | 2 +- .../migrate/versions/0056_workflow_outputs.py | 2 +- .../migrate/versions/0057_request_notify.py | 2 +- .../versions/0058_history_import_export.py | 6 +- .../versions/0059_sample_dataset_file_path.py | 2 +- 
.../versions/0060_history_archive_import.py | 4 +- .../model/migrate/versions/0061_tasks.py | 2 +- .../versions/0062_user_openid_table.py | 2 +- .../migrate/versions/0063_sequencer_table.py | 2 +- ...d_run_and_sample_run_association_tables.py | 2 +- ...0065_add_name_to_form_fields_and_values.py | 2 +- ...66_deferred_job_and_transfer_job_tables.py | 2 +- .../versions/0067_populate_sequencer_table.py | 2 +- ...8_rename_sequencer_to_external_services.py | 2 +- .../0069_rename_sequencer_form_type.py | 2 +- ...0_add_info_column_to_deferred_job_table.py | 6 +- ...0071_add_history_and_workflow_to_sample.py | 8 +- ...nd_socket_columns_to_transfer_job_table.py | 6 +- ...3_add_ldda_to_implicit_conversion_table.py | 6 +- ..._purged_column_to_library_dataset_table.py | 8 +- .../0075_add_subindex_column_to_run_table.py | 6 +- .../0076_fix_form_values_data_corruption.py | 14 +-- .../0077_create_tool_tag_association_table.py | 2 +- ...8_add_columns_for_disk_usage_accounting.py | 18 +-- .../0079_input_library_to_job_table.py | 6 +- .../migrate/versions/0080_quota_tables.py | 2 +- .../0081_add_tool_version_to_hda_ldda.py | 6 +- .../0082_add_tool_shed_repository_table.py | 2 +- .../0083_add_prepare_files_to_task.py | 12 +- ...dd_ldda_id_to_implicit_conversion_table.py | 6 +- .../migrate/versions/0085_add_task_info.py | 6 +- ..._add_tool_shed_repository_table_columns.py | 14 +-- .../versions/0087_tool_id_guid_map_table.py | 2 +- ..._add_installed_changeset_revison_column.py | 8 +- .../0089_add_object_store_id_columns.py | 6 +- ..._add_tool_shed_repository_table_columns.py | 10 +- .../versions/0091_add_tool_version_tables.py | 4 +- .../versions/0092_add_migrate_tools_table.py | 2 +- .../versions/0093_add_job_params_col.py | 8 +- .../versions/0094_add_job_handler_col.py | 8 +- .../migrate/versions/0095_hda_subsets.py | 8 +- .../migrate/versions/0096_openid_provider.py | 6 +- .../versions/0097_add_ctx_rev_column.py | 6 +- .../0098_genome_index_tool_data_table.py | 2 +- .../0099_add_tool_dependency_table.py | 2 +- ...er_tool_dependency_table_version_column.py | 2 +- ...rop_installed_changeset_revision_column.py | 2 +- ...0102_add_tool_dependency_status_columns.py | 14 +-- ...add_tool_shed_repository_status_columns.py | 28 ++--- .../versions/0105_add_cleanup_event_table.py | 2 +- .../versions/0106_add_missing_indexes.py | 2 +- .../0107_add_exit_code_to_job_and_task.py | 10 +- .../versions/0108_add_extended_metadata.py | 6 +- .../0109_add_repository_dependency_tables.py | 2 +- .../migrate/versions/0110_add_dataset_uuid.py | 8 +- .../versions/0111_add_job_destinations.py | 6 +- ...and_data_manager_job_association_tables.py | 2 +- .../0113_update_migrate_tools_table.py | 2 +- .../0114_update_migrate_tools_table_again.py | 2 +- ..._available_col_add_tool_shed_status_col.py | 10 +- .../versions/0117_add_user_activation.py | 8 +- .../0118_add_hda_extended_metadata.py | 6 +- .../migrate/versions/0119_job_metrics.py | 6 +- .../versions/0120_dataset_collections.py | 10 +- .../migrate/versions/0121_workflow_uuids.py | 6 +- .../migrate/versions/0122_grow_mysql_blobs.py | 6 +- .../0123_add_workflow_request_tables.py | 10 +- .../versions/0124_job_state_history.py | 6 +- .../versions/0125_workflow_step_tracking.py | 6 +- .../migrate/versions/0126_password_reset.py | 6 +- .../0127_output_collection_adjustments.py | 10 +- .../migrate/versions/0128_session_timeout.py | 6 +- ...9_job_external_output_metadata_validity.py | 6 +- .../versions/0130_change_pref_datatype.py | 2 +- ...subworkflow_and_input_parameter_modules.py | 12 +- 
107 files changed, 442 insertions(+), 336 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 25feb081b910..46b0bf5ff529 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -50,6 +50,112 @@ lib/galaxy/model/migrate/versions/0020_library_upload_job.py lib/galaxy/model/migrate/versions/0021_user_prefs.py lib/galaxy/model/migrate/versions/0022_visualization_tables.py lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py +lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py +lib/galaxy/model/migrate/versions/0025_user_info.py +lib/galaxy/model/migrate/versions/0026_cloud_tables.py +lib/galaxy/model/migrate/versions/0027_request_events.py +lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py +lib/galaxy/model/migrate/versions/0029_user_actions.py +lib/galaxy/model/migrate/versions/0030_history_slug_column.py +lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py +lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py +lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py +lib/galaxy/model/migrate/versions/0034_page_user_share_association.py +lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py +lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py +lib/galaxy/model/migrate/versions/0037_samples_library.py +lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py +lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py +lib/galaxy/model/migrate/versions/0040_page_annotations.py +lib/galaxy/model/migrate/versions/0041_workflow_invocation.py +lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py +lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py +lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py +lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py +lib/galaxy/model/migrate/versions/0046_post_job_actions.py +lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py +lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py +lib/galaxy/model/migrate/versions/0049_api_keys_table.py +lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py +lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py +lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py +lib/galaxy/model/migrate/versions/0053_item_ratings.py +lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py +lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py +lib/galaxy/model/migrate/versions/0056_workflow_outputs.py +lib/galaxy/model/migrate/versions/0057_request_notify.py +lib/galaxy/model/migrate/versions/0058_history_import_export.py +lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py +lib/galaxy/model/migrate/versions/0060_history_archive_import.py +lib/galaxy/model/migrate/versions/0061_tasks.py +lib/galaxy/model/migrate/versions/0062_user_openid_table.py +lib/galaxy/model/migrate/versions/0063_sequencer_table.py +lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py +lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py +lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py +lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py 
+lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py +lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py +lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py +lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py +lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py +lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py +lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py +lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py +lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py +lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py +lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py +lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py +lib/galaxy/model/migrate/versions/0080_quota_tables.py +lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py +lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py +lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py +lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py +lib/galaxy/model/migrate/versions/0085_add_task_info.py +lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py +lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py +lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py +lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py +lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py +lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py +lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py +lib/galaxy/model/migrate/versions/0093_add_job_params_col.py +lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py +lib/galaxy/model/migrate/versions/0095_hda_subsets.py +lib/galaxy/model/migrate/versions/0096_openid_provider.py +lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py +lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py +lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py +lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py +lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py +lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py +lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py +lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py +lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py +lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py +lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py +lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py +lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py +lib/galaxy/model/migrate/versions/0111_add_job_destinations.py +lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py +lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py +lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py +lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py +lib/galaxy/model/migrate/versions/0117_add_user_activation.py 
+lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py +lib/galaxy/model/migrate/versions/0119_job_metrics.py +lib/galaxy/model/migrate/versions/0120_dataset_collections.py +lib/galaxy/model/migrate/versions/0121_workflow_uuids.py +lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py +lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py +lib/galaxy/model/migrate/versions/0124_job_state_history.py +lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py +lib/galaxy/model/migrate/versions/0126_password_reset.py +lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py +lib/galaxy/model/migrate/versions/0128_session_timeout.py +lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py +lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py +lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py lib/galaxy/objectstore/ lib/galaxy/openid/ lib/galaxy/quota/ diff --git a/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py b/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py index 272e6b29f659..49529ef0f30a 100644 --- a/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py +++ b/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py @@ -14,7 +14,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Page_table = Table( "page", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0025_user_info.py b/lib/galaxy/model/migrate/versions/0025_user_info.py index c13a284e7069..68ea01c95792 100644 --- a/lib/galaxy/model/migrate/versions/0025_user_info.py +++ b/lib/galaxy/model/migrate/versions/0025_user_info.py @@ -22,9 +22,9 @@ def display_migration_details(): - print "========================================" - print "This script adds a foreign key to the form_values table in the galaxy_user table" - print "========================================" + print("========================================") + print("This script adds a foreign key to the form_values table in the galaxy_user table") + print("========================================") def upgrade(migrate_engine): diff --git a/lib/galaxy/model/migrate/versions/0026_cloud_tables.py b/lib/galaxy/model/migrate/versions/0026_cloud_tables.py index 816c1c00ef97..06a44729b6f8 100644 --- a/lib/galaxy/model/migrate/versions/0026_cloud_tables.py +++ b/lib/galaxy/model/migrate/versions/0026_cloud_tables.py @@ -10,9 +10,9 @@ def display_migration_details(): print - print "========================================" - print "This script adds tables needed for Galaxy cloud functionality." 
- print "========================================" + print("========================================") + print("This script adds tables needed for Galaxy cloud functionality.") + print("========================================") CloudImage_table = Table( "cloud_image", metadata, Column( "id", Integer, primary_key=True ), diff --git a/lib/galaxy/model/migrate/versions/0027_request_events.py b/lib/galaxy/model/migrate/versions/0027_request_events.py index 959fd58c7045..c4806ff64c4d 100644 --- a/lib/galaxy/model/migrate/versions/0027_request_events.py +++ b/lib/galaxy/model/migrate/versions/0027_request_events.py @@ -24,10 +24,10 @@ def display_migration_details(): - print "========================================" - print "This migration script adds the request_event table and" - print "removes the state field in the request table" - print "========================================" + print("========================================") + print("This migration script adds the request_event table and") + print("removes the state field in the request table") + print("========================================") RequestEvent_table = Table('request_event', metadata, diff --git a/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py b/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py index 03e29102517f..596c0d99b5b8 100644 --- a/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py +++ b/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py @@ -22,11 +22,11 @@ def display_migration_details(): - print "========================================" - print "This script adds the filename_override_metadata column to the JobExternalOutputMetadata table," - print" allowing existing metadata files to be written when using external metadata and a cluster" - print "set up with read-only access to database/files" - print "========================================" + print("========================================") + print("This script adds the filename_override_metadata column to the JobExternalOutputMetadata table,") + print(" allowing existing metadata files to be written when using external metadata and a cluster") + print("set up with read-only access to database/files") + print("========================================") def upgrade(migrate_engine): diff --git a/lib/galaxy/model/migrate/versions/0029_user_actions.py b/lib/galaxy/model/migrate/versions/0029_user_actions.py index 787b133e769d..ac546f9e1dec 100644 --- a/lib/galaxy/model/migrate/versions/0029_user_actions.py +++ b/lib/galaxy/model/migrate/versions/0029_user_actions.py @@ -12,9 +12,9 @@ def display_migration_details(): - print "" - print "This migration script adds a user actions table to Galaxy." - print "" + print("") + print("This migration script adds a user actions table to Galaxy.") + print("") # New table to store user actions. 
@@ -35,7 +35,7 @@ def upgrade(migrate_engine): try: UserAction_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating user_action table failed: %s" % str( e ) ) @@ -45,5 +45,5 @@ def downgrade(migrate_engine): try: UserAction_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping user_action table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0030_history_slug_column.py b/lib/galaxy/model/migrate/versions/0030_history_slug_column.py index 0aa97ae716e4..4a57937ef5d9 100644 --- a/lib/galaxy/model/migrate/versions/0030_history_slug_column.py +++ b/lib/galaxy/model/migrate/versions/0030_history_slug_column.py @@ -12,7 +12,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() History_table = Table( "history", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py b/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py index 0edeb977be76..a7a4b737c54f 100644 --- a/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py +++ b/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py @@ -33,7 +33,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create user_id column in history_tag_association table. @@ -45,7 +45,7 @@ def upgrade(migrate_engine): assert c is HistoryTagAssociation_table.c.user_id except Exception as e: # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises. - print str(e) + print(str(e)) log.debug( "Adding user_id column to history_tag_association table failed: %s" % str( e ) ) else: c = Column( "user_id", Integer) @@ -54,7 +54,7 @@ def upgrade(migrate_engine): assert c is HistoryTagAssociation_table.c.user_id except Exception as e: # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises. - print str(e) + print(str(e)) log.debug( "Adding user_id column to history_tag_association table failed: %s" % str( e ) ) # Populate column so that user_id is the id of the user who owns the history (and, up to now, was the only person able to tag the history). @@ -71,7 +71,7 @@ def upgrade(migrate_engine): assert c is HistoryDatasetAssociationTagAssociation_table.c.user_id except Exception as e: # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises. - print str(e) + print(str(e)) log.debug( "Adding user_id column to history_dataset_association_tag_association table failed: %s" % str( e ) ) else: # In sqlite, we can no longer quietly fail to add foreign key. @@ -83,7 +83,7 @@ def upgrade(migrate_engine): assert c is HistoryDatasetAssociationTagAssociation_table.c.user_id except Exception as e: # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises. - print str(e) + print(str(e)) log.debug( "Adding user_id column to history_dataset_association_tag_association table failed: %s" % str( e ) ) # Populate column so that user_id is the id of the user who owns the history_dataset_association (and, up to now, was the only person able to tag the page). @@ -99,7 +99,7 @@ def upgrade(migrate_engine): assert c is PageTagAssociation_table.c.user_id except Exception as e: # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises. 
- print str(e) + print(str(e)) log.debug( "Adding user_id column to page_tag_association table failed: %s" % str( e ) ) else: # Create user_id column in page_tag_association table. @@ -110,7 +110,7 @@ def upgrade(migrate_engine): assert c is PageTagAssociation_table.c.user_id except Exception as e: # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises. - print str(e) + print(str(e)) log.debug( "Adding user_id column to page_tag_association table failed: %s" % str( e ) ) # Populate column so that user_id is the id of the user who owns the page (and, up to now, was the only person able to tag the page). @@ -122,14 +122,14 @@ def upgrade(migrate_engine): try: StoredWorkflowTagAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating stored_workflow_tag_association table failed: %s" % str( e ) ) # Create workflow_tag_association table. try: WorkflowTagAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating workflow_tag_association table failed: %s" % str( e ) ) @@ -142,7 +142,7 @@ def downgrade(migrate_engine): try: HistoryTagAssociation_table.c.user_id.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping column user_id from history_tag_association table failed: %s" % str( e ) ) # Drop user_id column from history_dataset_association_tag_association table. @@ -150,7 +150,7 @@ def downgrade(migrate_engine): try: HistoryDatasetAssociationTagAssociation_table.c.user_id.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping column user_id from history_dataset_association_tag_association table failed: %s" % str( e ) ) # Drop user_id column from page_tag_association table. @@ -158,19 +158,19 @@ def downgrade(migrate_engine): try: PageTagAssociation_table.c.user_id.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping column user_id from page_tag_association table failed: %s" % str( e ) ) # Drop stored_workflow_tag_association table. try: StoredWorkflowTagAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping stored_workflow_tag_association table failed: %s" % str( e ) ) # Drop workflow_tag_association table. 
try: WorkflowTagAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping workflow_tag_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py b/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py index 331e61189ead..6adb4a4e9af6 100644 --- a/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py +++ b/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py @@ -12,7 +12,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() StoredWorkflow_table = Table( "stored_workflow", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py b/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py index a56e7b7b067d..32509128bab4 100644 --- a/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py +++ b/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py @@ -12,7 +12,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create published column in history table. @@ -22,7 +22,7 @@ def upgrade(migrate_engine): c.create( History_table, index_name='ix_history_published') assert c is History_table.c.published except Exception as e: - print "Adding published column to history table failed: %s" % str( e ) + print("Adding published column to history table failed: %s" % str( e )) log.debug( "Adding published column to history table failed: %s" % str( e ) ) if migrate_engine.name != 'sqlite': @@ -41,7 +41,7 @@ def upgrade(migrate_engine): c.create( StoredWorkflow_table, index_name='ix_stored_workflow_published') assert c is StoredWorkflow_table.c.published except Exception as e: - print "Adding published column to stored_workflow table failed: %s" % str( e ) + print("Adding published column to stored_workflow table failed: %s" % str( e )) log.debug( "Adding published column to stored_workflow table failed: %s" % str( e ) ) if migrate_engine.name != 'sqlite': @@ -60,7 +60,7 @@ def upgrade(migrate_engine): c.create( Page_table, index_name='ix_page_importable') assert c is Page_table.c.importable except Exception as e: - print "Adding importable column to page table failed: %s" % str( e ) + print("Adding importable column to page table failed: %s" % str( e )) log.debug( "Adding importable column to page table failed: %s" % str( e ) ) if migrate_engine.name != 'sqlite': @@ -82,7 +82,7 @@ def downgrade(migrate_engine): try: History_table.c.published.drop() except Exception as e: - print "Dropping column published from history table failed: %s" % str( e ) + print("Dropping column published from history table failed: %s" % str( e )) log.debug( "Dropping column published from history table failed: %s" % str( e ) ) # Drop published column from stored_workflow table. @@ -90,7 +90,7 @@ def downgrade(migrate_engine): try: StoredWorkflow_table.c.published.drop() except Exception as e: - print "Dropping column published from stored_workflow table failed: %s" % str( e ) + print("Dropping column published from stored_workflow table failed: %s" % str( e )) log.debug( "Dropping column published from stored_workflow table failed: %s" % str( e ) ) # Drop importable column from page table. 
@@ -98,5 +98,5 @@ def downgrade(migrate_engine): try: Page_table.c.importable.drop() except Exception as e: - print "Dropping column importable from page table failed: %s" % str( e ) + print("Dropping column importable from page table failed: %s" % str( e )) log.debug( "Dropping column importable from page table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py b/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py index 33bfe87b4f7a..fd2489017108 100644 --- a/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py +++ b/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py @@ -16,14 +16,14 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create stored_workflow_tag_association table. try: PageUserShareAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating page_user_share_association table failed: %s" % str( e ) ) @@ -35,5 +35,5 @@ def downgrade(migrate_engine): try: PageUserShareAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping page_user_share_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py b/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py index cf26eb1e4355..3be86d4ef416 100644 --- a/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py +++ b/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py @@ -48,42 +48,42 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create history_annotation_association table. try: HistoryAnnotationAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating history_annotation_association table failed: %s" % str( e ) ) # Create history_dataset_association_annotation_association table. try: HistoryDatasetAssociationAnnotationAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating history_dataset_association_annotation_association table failed: %s" % str( e ) ) # Create stored_workflow_annotation_association table. try: StoredWorkflowAnnotationAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating stored_workflow_annotation_association table failed: %s" % str( e ) ) # Create workflow_step_annotation_association table. try: WorkflowStepAnnotationAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating workflow_step_annotation_association table failed: %s" % str( e ) ) # Create workflow_step_tag_association table. 
try: WorkflowStepTagAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating workflow_step_tag_association table failed: %s" % str( e ) ) haaa = Index( "ix_history_anno_assoc_annotation", HistoryAnnotationAssociation_table.c.annotation, mysql_length=200) @@ -97,7 +97,7 @@ def upgrade(migrate_engine): swaaa.create() wsaaa.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating annotation indices failed: %s" % str( e ) ) @@ -109,33 +109,33 @@ def downgrade(migrate_engine): try: HistoryAnnotationAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping history_annotation_association table failed: %s" % str( e ) ) # Drop history_dataset_association_annotation_association table. try: HistoryDatasetAssociationAnnotationAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping history_dataset_association_annotation_association table failed: %s" % str( e ) ) # Drop stored_workflow_annotation_association table. try: StoredWorkflowAnnotationAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping stored_workflow_annotation_association table failed: %s" % str( e ) ) # Drop workflow_step_annotation_association table. try: WorkflowStepAnnotationAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping workflow_step_annotation_association table failed: %s" % str( e ) ) # Drop workflow_step_tag_association table. try: WorkflowStepTagAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping workflow_step_tag_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py b/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py index b24984dd7929..a13ed69a0bfc 100644 --- a/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py +++ b/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py @@ -19,7 +19,7 @@ def get_false_value(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() LibraryInfoAssociation_table = Table( "library_info_association", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0037_samples_library.py b/lib/galaxy/model/migrate/versions/0037_samples_library.py index cb77bfa949c6..008a89c1747c 100644 --- a/lib/galaxy/model/migrate/versions/0037_samples_library.py +++ b/lib/galaxy/model/migrate/versions/0037_samples_library.py @@ -27,7 +27,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) # Load existing tables metadata.reflect() # retuest_type table diff --git a/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py b/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py index f319e3e91973..f2013829988e 100644 --- a/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py +++ b/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py @@ -25,7 +25,7 @@ def get_false_value(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) # # In case of sqlite, check 
if the previous migration script deleted the diff --git a/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py b/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py index aafe6500febb..45afb6d0d055 100644 --- a/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py +++ b/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Library_table = Table( "library", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0040_page_annotations.py b/lib/galaxy/model/migrate/versions/0040_page_annotations.py index eac206c67239..eb9c3ee0106c 100644 --- a/lib/galaxy/model/migrate/versions/0040_page_annotations.py +++ b/lib/galaxy/model/migrate/versions/0040_page_annotations.py @@ -17,14 +17,14 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create history_annotation_association table. try: PageAnnotationAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating page_annotation_association table failed: %s" % str( e ) ) @@ -36,5 +36,5 @@ def downgrade(migrate_engine): try: PageAnnotationAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping page_annotation_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py b/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py index ed4b29e0bfea..27a5112d58ac 100644 --- a/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py +++ b/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py @@ -30,7 +30,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in tables: diff --git a/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py b/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py index a19598ef2107..d95969445f8f 100644 --- a/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py +++ b/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() diff --git a/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py index a1313a8cfc08..1f8878d3c376 100644 --- a/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py +++ b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py @@ -37,7 +37,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Visualiation_table = Table( "visualization", metadata, autoload=True ) @@ -45,7 +45,7 @@ def upgrade(migrate_engine): try: VisualizationUserShareAssociation_table.create() except Exception as e: - print "Creating visualization_user_share_association table failed: %s" % str( e ) + print("Creating visualization_user_share_association table failed: %s" % str( e )) log.debug( "Creating visualization_user_share_association table failed: %s" % str( e ) ) # Get default boolean value 'false' so that columns can be initialized. 
@@ -69,7 +69,7 @@ def upgrade(migrate_engine): cmd = "UPDATE visualization SET deleted = %s" % default_false migrate_engine.execute( cmd ) except Exception as e: - print "Adding deleted column to visualization table failed: %s" % str( e ) + print("Adding deleted column to visualization table failed: %s" % str( e )) log.debug( "Adding deleted column to visualization table failed: %s" % str( e ) ) try: @@ -81,14 +81,14 @@ def upgrade(migrate_engine): cmd = "UPDATE visualization SET importable = %s" % default_false migrate_engine.execute( cmd ) except Exception as e: - print "Adding importable column to visualization table failed: %s" % str( e ) + print("Adding importable column to visualization table failed: %s" % str( e )) log.debug( "Adding importable column to visualization table failed: %s" % str( e ) ) try: slug_column.create( Visualiation_table ) assert slug_column is Visualiation_table.c.slug except Exception as e: - print "Adding slug column to visualization table failed: %s" % str( e ) + print("Adding slug column to visualization table failed: %s" % str( e )) log.debug( "Adding slug column to visualization table failed: %s" % str( e ) ) try: @@ -100,7 +100,7 @@ def upgrade(migrate_engine): i = Index( "ix_visualization_slug", Visualiation_table.c.slug ) i.create() except Exception as e: - print "Adding index 'ix_visualization_slug' failed: %s" % str( e ) + print("Adding index 'ix_visualization_slug' failed: %s" % str( e )) log.debug( "Adding index 'ix_visualization_slug' failed: %s" % str( e ) ) try: @@ -112,21 +112,21 @@ def upgrade(migrate_engine): cmd = "UPDATE visualization SET published = %s" % default_false migrate_engine.execute( cmd ) except Exception as e: - print "Adding published column to visualization table failed: %s" % str( e ) + print("Adding published column to visualization table failed: %s" % str( e )) log.debug( "Adding published column to visualization table failed: %s" % str( e ) ) # Create visualization_tag_association table. try: VisualizationTagAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating visualization_tag_association table failed: %s" % str( e ) ) # Create visualization_annotation_association table. try: VisualizationAnnotationAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating visualization_annotation_association table failed: %s" % str( e ) ) # Need to create index for visualization annotation manually to deal with errors. @@ -139,7 +139,7 @@ def upgrade(migrate_engine): i = Index( "ix_visualization_annotation_association_annotation", VisualizationAnnotationAssociation_table.c.annotation ) i.create() except Exception as e: - print "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e ) + print("Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e )) log.debug( "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e ) ) @@ -152,44 +152,44 @@ def downgrade(migrate_engine): try: VisualizationUserShareAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping visualization_user_share_association table failed: %s" % str( e ) ) # Drop columns for supporting sharing from visualization table. 
try: Visualiation_table.c.deleted.drop() except Exception as e: - print "Dropping deleted column from visualization table failed: %s" % str( e ) + print("Dropping deleted column from visualization table failed: %s" % str( e )) log.debug( "Dropping deleted column from visualization table failed: %s" % str( e ) ) try: Visualiation_table.c.importable.drop() except Exception as e: - print "Dropping importable column from visualization table failed: %s" % str( e ) + print("Dropping importable column from visualization table failed: %s" % str( e )) log.debug( "Dropping importable column from visualization table failed: %s" % str( e ) ) try: Visualiation_table.c.slug.drop() except Exception as e: - print "Dropping slug column from visualization table failed: %s" % str( e ) + print("Dropping slug column from visualization table failed: %s" % str( e )) log.debug( "Dropping slug column from visualization table failed: %s" % str( e ) ) try: Visualiation_table.c.published.drop() except Exception as e: - print "Dropping published column from visualization table failed: %s" % str( e ) + print("Dropping published column from visualization table failed: %s" % str( e )) log.debug( "Dropping published column from visualization table failed: %s" % str( e ) ) # Drop visualization_tag_association table. try: VisualizationTagAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping visualization_tag_association table failed: %s" % str( e ) ) # Drop visualization_annotation_association table. try: VisualizationAnnotationAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping visualization_annotation_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py b/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py index e887f69666fc..61a5cd2d2a7d 100644 --- a/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py +++ b/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Request_table = Table( "request", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py index 50701c25d6ee..300eb8660ae5 100644 --- a/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py +++ b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py @@ -21,7 +21,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: RequestTypePermissions_table.create() diff --git a/lib/galaxy/model/migrate/versions/0046_post_job_actions.py b/lib/galaxy/model/migrate/versions/0046_post_job_actions.py index 4cb8d49d4b64..7a04f453a7ce 100644 --- a/lib/galaxy/model/migrate/versions/0046_post_job_actions.py +++ b/lib/galaxy/model/migrate/versions/0046_post_job_actions.py @@ -31,7 +31,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in tables: try: diff --git a/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py b/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py index 5fcc597e7d20..d50517e0cf22 100644 --- a/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py +++ 
b/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py @@ -22,7 +22,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Job_table = Table( "job", metadata, autoload=True ) @@ -51,17 +51,17 @@ + "FROM job " \ + "JOIN galaxy_session ON job.session_id = galaxy_session.id;" job_users = migrate_engine.execute( cmd ).fetchall() - print "Updating user_id column in job table for ", len( job_users ), " rows..." - print "" + print("Updating user_id column in job table for %d rows..." % len( job_users )) + print("") update_count = 0 for row in job_users: if row.galaxy_user_id: cmd = "UPDATE job SET user_id = %d WHERE id = %d" % ( int( row.galaxy_user_id ), int( row.galaxy_job_id ) ) update_count += 1 migrate_engine.execute( cmd ) - print "Updated the user_id column for ", update_count, " rows in the job table. " - print len( job_users ) - update_count, " rows have no user_id since the value was NULL in the galaxy_session table." - print "" + print("Updated the user_id column for %d rows in the job table." % update_count) + print("%d rows have no user_id since the value was NULL in the galaxy_session table." % ( len( job_users ) - update_count )) + print("") except Exception as e: log.debug( "Updating job.user_id column failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py b/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py index 77ea2dd7c307..a1a66d68bb81 100644 --- a/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py +++ b/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py @@ -26,7 +26,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() dataset_instance_tables = [] for table_name in DATASET_INSTANCE_TABLE_NAMES: diff --git a/lib/galaxy/model/migrate/versions/0049_api_keys_table.py b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py index 8f04cf7eca7a..0696a0d04efb 100644 --- a/lib/galaxy/model/migrate/versions/0049_api_keys_table.py +++ b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py @@ -21,7 +21,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: APIKeys_table.create() diff --git a/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py b/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py index ca252dde5bf0..1e794fd5afab 100644 --- a/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py +++ b/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py @@ -10,9 +10,9 @@ def display_migration_details(): print - print "========================================" - print "This script drops tables that were associated with the old Galaxy Cloud functionality."
- print "========================================" + print("========================================") + print("This script drops tables that were associated with the old Galaxy Cloud functionality.") + print("========================================") CloudImage_table = Table( "cloud_image", metadata, Column( "id", Integer, primary_key=True ), diff --git a/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py b/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py index 6a0bc3d4a171..ecb5ced6f6dd 100644 --- a/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py +++ b/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create and initialize imported column in job table. @@ -30,7 +30,7 @@ def upgrade(migrate_engine): migrate_engine.execute( "UPDATE job SET imported=%s" % default_false ) except Exception as e: - print "Adding imported column to job table failed: %s" % str( e ) + print("Adding imported column to job table failed: %s" % str( e )) log.debug( "Adding imported column to job table failed: %s" % str( e ) ) @@ -43,5 +43,5 @@ def downgrade(migrate_engine): try: Jobs_table.c.imported.drop() except Exception as e: - print "Dropping column imported from job table failed: %s" % str( e ) + print("Dropping column imported from job table failed: %s" % str( e )) log.debug( "Dropping column imported from job table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py index d3c8f9236f4e..7781a19b689c 100644 --- a/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py +++ b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py @@ -47,7 +47,7 @@ def localtimestamp(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: SampleDataset_table.create() diff --git a/lib/galaxy/model/migrate/versions/0053_item_ratings.py b/lib/galaxy/model/migrate/versions/0053_item_ratings.py index 9d4ae1341c5c..688ebc83d05f 100644 --- a/lib/galaxy/model/migrate/versions/0053_item_ratings.py +++ b/lib/galaxy/model/migrate/versions/0053_item_ratings.py @@ -43,14 +43,14 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create history_rating_association table. try: HistoryRatingAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating history_rating_association table failed: %s" % str( e ) ) # Create history_dataset_association_rating_association table. @@ -64,31 +64,31 @@ def upgrade(migrate_engine): try: i.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Adding index 'ix_hda_rating_association_hda_id' to table 'history_dataset_association_rating_association' table failed: %s" % str( e ) ) else: - print str(e) + print(str(e)) log.debug( "Creating history_dataset_association_rating_association table failed: %s" % str( e ) ) # Create stored_workflow_rating_association table. try: StoredWorkflowRatingAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating stored_workflow_rating_association table failed: %s" % str( e ) ) # Create page_rating_association table. 
try: PageRatingAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating page_rating_association table failed: %s" % str( e ) ) # Create visualization_rating_association table. try: VisualizationRatingAssociation_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating visualization_rating_association table failed: %s" % str( e ) ) @@ -100,33 +100,33 @@ def downgrade(migrate_engine): try: HistoryRatingAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping history_rating_association table failed: %s" % str( e ) ) # Drop history_dataset_association_rating_association table. try: HistoryDatasetAssociationRatingAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping history_dataset_association_rating_association table failed: %s" % str( e ) ) # Drop stored_workflow_rating_association table. try: StoredWorkflowRatingAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping stored_workflow_rating_association table failed: %s" % str( e ) ) # Drop page_rating_association table. try: PageRatingAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping page_rating_association table failed: %s" % str( e ) ) # Drop visualization_rating_association table. try: VisualizationRatingAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping visualization_rating_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py index a60768336f5f..d9f6be19067f 100644 --- a/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py +++ b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Visualization_table = Table( "visualization", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py b/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py index 6b19902f2528..da4c58f9d59b 100644 --- a/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py +++ b/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py @@ -18,7 +18,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: PostJobActionAssociation_table.create() diff --git a/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py b/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py index 8bbc1f0bf552..4f2cf97425a1 100644 --- a/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py +++ b/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py @@ -20,7 +20,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in tables: try: diff --git a/lib/galaxy/model/migrate/versions/0057_request_notify.py b/lib/galaxy/model/migrate/versions/0057_request_notify.py index 711a352fff83..07c382a70e14 100644 --- a/lib/galaxy/model/migrate/versions/0057_request_notify.py +++ b/lib/galaxy/model/migrate/versions/0057_request_notify.py @@ -18,7 +18,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Request_table = Table( "request", metadata, 
autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0058_history_import_export.py b/lib/galaxy/model/migrate/versions/0058_history_import_export.py index ddce22ce81ac..f6c4953e5f6d 100644 --- a/lib/galaxy/model/migrate/versions/0058_history_import_export.py +++ b/lib/galaxy/model/migrate/versions/0058_history_import_export.py @@ -22,7 +22,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() @@ -30,7 +30,7 @@ def upgrade(migrate_engine): try: JobExportHistoryArchive_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating job_export_history_archive table failed: %s" % str( e ) ) @@ -42,5 +42,5 @@ def downgrade(migrate_engine): try: JobExportHistoryArchive_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping job_export_history_archive table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py index c03d59129f64..47c2cad2687c 100644 --- a/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py +++ b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py @@ -15,7 +15,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: SampleDataset_table = Table( "sample_dataset", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0060_history_archive_import.py b/lib/galaxy/model/migrate/versions/0060_history_archive_import.py index d6f7a815fa9a..64170b5602ef 100644 --- a/lib/galaxy/model/migrate/versions/0060_history_archive_import.py +++ b/lib/galaxy/model/migrate/versions/0060_history_archive_import.py @@ -25,7 +25,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Add column to history table and initialize. @@ -41,7 +41,7 @@ def upgrade(migrate_engine): default_false = "false" migrate_engine.execute( "UPDATE history SET importing=%s" % default_false ) except Exception as e: - print str(e) + print(str(e)) log.debug( "Adding column 'importing' to history table failed: %s" % str( e ) ) # Create job_import_history_archive table. 
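The hunks in this series all apply one mechanical rewrite: the Python 2 print statement becomes a call to the print() function, with the message formatting left untouched. A minimal sketch of the pattern, using hypothetical names (demo_col, demo_table, add_demo_column) rather than any real Galaxy table:

    from __future__ import print_function  # on Python 2, makes print() a real function call

    import logging

    log = logging.getLogger(__name__)


    def add_demo_column(demo_col, demo_table):
        try:
            demo_col.create(demo_table)  # sqlalchemy-migrate style column DDL
        except Exception as e:
            # Python 2 statement form removed by this patch:
            #     print "Adding demo column failed: %s" % str(e)
            print("Adding demo column failed: %s" % str(e))
            log.debug("Adding demo column failed: %s" % str(e))

One caveat: without the __future__ import, a comma-style call such as print("failed: ", str(e)) still parses on Python 2 but prints a tuple, so the two-argument hunks (0074, 0075, 0081 below) only render cleanly under Python 3 or with print_function in effect.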
diff --git a/lib/galaxy/model/migrate/versions/0061_tasks.py b/lib/galaxy/model/migrate/versions/0061_tasks.py index 39c695d5a39e..cec0c5d1e8b0 100644 --- a/lib/galaxy/model/migrate/versions/0061_tasks.py +++ b/lib/galaxy/model/migrate/versions/0061_tasks.py @@ -32,7 +32,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in tables: try: diff --git a/lib/galaxy/model/migrate/versions/0062_user_openid_table.py b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py index db8e25fefd2a..59f80f975ad4 100644 --- a/lib/galaxy/model/migrate/versions/0062_user_openid_table.py +++ b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py @@ -24,7 +24,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create galaxy_user_openid table diff --git a/lib/galaxy/model/migrate/versions/0063_sequencer_table.py b/lib/galaxy/model/migrate/versions/0063_sequencer_table.py index f1d2b39ddc7b..77a57db0a61c 100644 --- a/lib/galaxy/model/migrate/versions/0063_sequencer_table.py +++ b/lib/galaxy/model/migrate/versions/0063_sequencer_table.py @@ -29,7 +29,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # create the sequencer table try: diff --git a/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py b/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py index 9233e7a7cb8c..d46da866a6ec 100644 --- a/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py +++ b/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py @@ -31,7 +31,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Run_table.create() diff --git a/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py index 792cc579f9ec..dc10e46ab97f 100644 --- a/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py +++ b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py @@ -25,7 +25,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Table( "form_definition", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py b/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py index 60855fc56a22..09d787964b38 100644 --- a/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py +++ b/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py @@ -34,7 +34,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create deferred_job table diff --git a/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py index 88622f60262f..908edb319d61 100644 --- a/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py +++ b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py @@ -189,7 +189,7 @@ def update_sequencer_id_in_request_type( migrate_engine, request_type_id, sequen def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) 
metadata.reflect() try: RequestType_table = Table( "request_type", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py index 6514438dd6f4..7c98bf76353b 100644 --- a/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py +++ b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py @@ -32,7 +32,7 @@ def nextval( migrate_engine, table, col='id' ): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) # Load existing tables metadata.reflect() # add a foreign key to the external_service table in the sample_dataset table diff --git a/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py b/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py index c92c7a554bb2..693e00118c43 100644 --- a/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py +++ b/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() current_form_type = 'Sequencer Information Form' new_form_type = "External Service Information Form" diff --git a/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py b/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py index 5b37a99777a3..74f96c36d0da 100644 --- a/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py +++ b/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: TransferJob_table = Table( "transfer_job", metadata, autoload=True ) @@ -19,7 +19,7 @@ def upgrade(migrate_engine): c.create( TransferJob_table ) assert c is TransferJob_table.c.info except Exception as e: - print "Adding info column to transfer_job table failed: %s" % str( e ) + print("Adding info column to transfer_job table failed: %s" % str( e )) log.debug( "Adding info column to transfer_job table failed: %s" % str( e ) ) @@ -30,5 +30,5 @@ def downgrade(migrate_engine): TransferJob_table = Table( "transfer_job", metadata, autoload=True ) TransferJob_table.c.info.drop() except Exception as e: - print "Dropping info column from transfer_job table failed: %s" % str( e ) + print("Dropping info column from transfer_job table failed: %s" % str( e )) log.debug( "Dropping info column from transfer_job table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py b/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py index 15f60026a78b..b10ed120f1f1 100644 --- a/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py +++ b/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Sample_table = Table( "sample", metadata, autoload=True ) @@ -24,7 +24,7 @@ def upgrade(migrate_engine): assert c1 is Sample_table.c.workflow assert c2 is Sample_table.c.history_id except Exception as e: - print "Adding history and workflow columns to sample table failed: %s" % str( e ) + print("Adding history and workflow columns to sample table 
failed: %s" % str( e )) log.debug( "Adding history and workflow columns to sample table failed: %s" % str( e ) ) @@ -35,11 +35,11 @@ def downgrade(migrate_engine): Sample_table = Table( "sample", metadata, autoload=True ) Sample_table.c.workflow.drop() except Exception as e: - print "Dropping workflow column from sample table failed: %s" % str( e ) + print("Dropping workflow column from sample table failed: %s" % str( e )) log.debug( "Dropping workflow column from sample table failed: %s" % str( e ) ) try: Sample_table = Table( "sample", metadata, autoload=True ) Sample_table.c.history_id.drop() except Exception as e: - print "Dropping history column from sample table failed: %s" % str( e ) + print("Dropping history column from sample table failed: %s" % str( e )) log.debug( "Dropping history column from sample table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py b/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py index 19c9531f62e9..1399ab5606ae 100644 --- a/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py +++ b/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: TransferJob_table = Table( "transfer_job", metadata, autoload=True ) @@ -22,7 +22,7 @@ def upgrade(migrate_engine): c.create( TransferJob_table ) assert c is TransferJob_table.c.socket except Exception as e: - print "Adding columns to transfer_job table failed: %s" % str( e ) + print("Adding columns to transfer_job table failed: %s" % str( e )) log.debug( "Adding columns to transfer_job table failed: %s" % str( e ) ) @@ -34,5 +34,5 @@ def downgrade(migrate_engine): TransferJob_table.c.pid.drop() TransferJob_table.c.socket.drop() except Exception as e: - print "Dropping columns from transfer_job table failed: %s" % str( e ) + print("Dropping columns from transfer_job table failed: %s" % str( e )) log.debug( "Dropping columns from transfer_job table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py b/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py index c250ed898b13..a786d281f01b 100644 --- a/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py +++ b/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True ) @@ -23,7 +23,7 @@ def upgrade(migrate_engine): c.create( Implicitly_converted_table, index_name="ix_implicitly_converted_dataset_assoc_ldda_parent_id") assert c is Implicitly_converted_table.c.ldda_parent_id except Exception as e: - print "Adding ldda_parent_id column to implicitly_converted_dataset_association table failed: %s" % str( e ) + print("Adding ldda_parent_id column to implicitly_converted_dataset_association table failed: %s" % str( e )) log.debug( "Adding ldda_parent_id column to implicitly_converted_dataset_association table failed: %s" % str( e ) ) @@ -34,5 +34,5 @@ def downgrade(migrate_engine): Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True 
) Implicitly_converted_table.c.ldda_parent_id.drop() except Exception as e: - print "Dropping ldda_parent_id column from implicitly_converted_dataset_association table failed: %s" % str( e ) + print("Dropping ldda_parent_id column from implicitly_converted_dataset_association table failed: %s" % str( e )) log.debug( "Dropping ldda_parent_id column from implicitly_converted_dataset_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py b/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py index 6ca67a53b047..91f8255e88f3 100644 --- a/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py +++ b/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py @@ -29,7 +29,7 @@ def boolean_true(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: LibraryDataset_table = Table( "library_dataset", metadata, autoload=True ) @@ -37,7 +37,7 @@ def upgrade(migrate_engine): c.create( LibraryDataset_table, index_name='ix_library_dataset_purged') assert c is LibraryDataset_table.c.purged except Exception as e: - print "Adding purged column to library_dataset table failed: ", str( e ) + print("Adding purged column to library_dataset table failed: ", str( e )) # Update the purged flag to the default False cmd = "UPDATE library_dataset SET purged = %s;" % boolean_false(migrate_engine) try: @@ -53,7 +53,7 @@ def upgrade(migrate_engine): cmd = "SELECT * FROM library_dataset_dataset_association WHERE library_dataset_id = %d AND library_dataset_dataset_association.deleted = %s;" % ( int( row.id ), boolean_false(migrate_engine) ) active_lddas = migrate_engine.execute( cmd ).fetchall() if not active_lddas: - print "Updating purged column to True for LibraryDataset id : ", int( row.id ) + print("Updating purged column to True for LibraryDataset id : ", int( row.id )) cmd = "UPDATE library_dataset SET purged = %s WHERE id = %d;" % ( boolean_true(migrate_engine), int( row.id ) ) migrate_engine.execute( cmd ) @@ -65,4 +65,4 @@ def downgrade(migrate_engine): LibraryDataset_table = Table( "library_dataset", metadata, autoload=True ) LibraryDataset_table.c.purged.drop() except Exception as e: - print "Dropping purged column from library_dataset table failed: ", str( e ) + print("Dropping purged column from library_dataset table failed: ", str( e )) diff --git a/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py b/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py index 6a619c496bf4..51bef48e44c8 100644 --- a/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py +++ b/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py @@ -10,7 +10,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Run_table = Table( "run", metadata, autoload=True ) @@ -18,7 +18,7 @@ def upgrade(migrate_engine): c.create( Run_table, index_name="ix_run_subindex") assert c is Run_table.c.subindex except Exception as e: - print "Adding the subindex column to the run table failed: ", str( e ) + print("Adding the subindex column to the run table failed: ", str( e )) def downgrade(migrate_engine): @@ -28,4 +28,4 @@ def downgrade(migrate_engine): Run_table = Table( "run", metadata, autoload=True ) Run_table.c.subindex.drop() except Exception as e: - print 
"Dropping the subindex column from run table failed: ", str( e ) + print("Dropping the subindex column from run table failed: ", str( e )) diff --git a/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py index bc90ec4769ae..defd24d625b5 100644 --- a/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py +++ b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py @@ -15,7 +15,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() cmd = "SELECT form_values.id as id, form_values.content as field_values, form_definition.fields as fdfields " \ + " FROM form_definition, form_values " \ @@ -35,8 +35,8 @@ def upgrade(migrate_engine): field_values_str = _sniffnfix_pg9_hex( str( row['field_values'] ) ) try: # Encoding errors? Just to be safe. - print "Attempting to fix row %s" % row['id'] - print "Prior to replacement: %s" % field_values_str + print("Attempting to fix row %s" % row['id']) + print("Prior to replacement: %s" % field_values_str) except: pass field_values_dict = {} @@ -48,7 +48,7 @@ def upgrade(migrate_engine): if field_index == -1: # if the field name is not present the field values dict then # inform the admin that this form values cannot be fixed - print "The 'content' field of row 'id' %i does not have the field '%s' in the 'form_values' table and could not be fixed by this migration script." % ( int( field['id'] ), field['name'] ) + print("The 'content' field of row 'id' %i does not have the field '%s' in the 'form_values' table and could not be fixed by this migration script." % ( int( field['id'] ), field['name'] )) else: # check if this is the last field if index == len( fields_list ) - 1: @@ -74,13 +74,13 @@ def upgrade(migrate_engine): cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % ( json_values, int( row['id'] ) ) migrate_engine.execute( cmd ) try: - print "Post replacement: %s" % json_values + print("Post replacement: %s" % json_values) except: pass if corrupted_rows: - print 'Fixed %i corrupted rows.' % corrupted_rows + print('Fixed %i corrupted rows.' % corrupted_rows) else: - print 'No corrupted rows found.' 
+ print('No corrupted rows found.') def downgrade(migrate_engine): diff --git a/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py index c44b11d5784e..2a53e6135018 100644 --- a/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py +++ b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py @@ -26,7 +26,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create tool_tag_association table diff --git a/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py b/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py index 98cc3b1001a6..4c628efd261e 100644 --- a/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py +++ b/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: @@ -22,7 +22,7 @@ def upgrade(migrate_engine): c.create( Dataset_table ) assert c is Dataset_table.c.total_size except Exception as e: - print "Adding total_size column to dataset table failed: %s" % str( e ) + print("Adding total_size column to dataset table failed: %s" % str( e )) log.debug( "Adding total_size column to dataset table failed: %s" % str( e ) ) try: @@ -32,7 +32,7 @@ def upgrade(migrate_engine): assert c is HistoryDatasetAssociation_table.c.purged migrate_engine.execute(HistoryDatasetAssociation_table.update().values(purged=False)) except Exception as e: - print "Adding purged column to history_dataset_association table failed: %s" % str( e ) + print("Adding purged column to history_dataset_association table failed: %s" % str( e )) log.debug( "Adding purged column to history_dataset_association table failed: %s" % str( e ) ) try: @@ -41,7 +41,7 @@ def upgrade(migrate_engine): c.create( User_table, index_name="ix_galaxy_user_disk_usage") assert c is User_table.c.disk_usage except Exception as e: - print "Adding disk_usage column to galaxy_user table failed: %s" % str( e ) + print("Adding disk_usage column to galaxy_user table failed: %s" % str( e )) log.debug( "Adding disk_usage column to galaxy_user table failed: %s" % str( e ) ) try: @@ -50,7 +50,7 @@ def upgrade(migrate_engine): c.create( GalaxySession_table, index_name="ix_galaxy_session_disk_usage") assert c is GalaxySession_table.c.disk_usage except Exception as e: - print "Adding disk_usage column to galaxy_session table failed: %s" % str( e ) + print("Adding disk_usage column to galaxy_session table failed: %s" % str( e )) log.debug( "Adding disk_usage column to galaxy_session table failed: %s" % str( e ) ) @@ -61,26 +61,26 @@ def downgrade(migrate_engine): Dataset_table = Table( "dataset", metadata, autoload=True ) Dataset_table.c.total_size.drop() except Exception as e: - print "Dropping total_size column from dataset table failed: %s" % str( e ) + print("Dropping total_size column from dataset table failed: %s" % str( e )) log.debug( "Dropping total_size column from dataset table failed: %s" % str( e ) ) try: HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True ) HistoryDatasetAssociation_table.c.purged.drop() except Exception as e: - print "Dropping purged column from history_dataset_association table failed: %s" % str( e ) + print("Dropping purged column from 
history_dataset_association table failed: %s" % str( e )) log.debug( "Dropping purged column from history_dataset_association table failed: %s" % str( e ) ) try: User_table = Table( "galaxy_user", metadata, autoload=True ) User_table.c.disk_usage.drop() except Exception as e: - print "Dropping disk_usage column from galaxy_user table failed: %s" % str( e ) + print("Dropping disk_usage column from galaxy_user table failed: %s" % str( e )) log.debug( "Dropping disk_usage column from galaxy_user table failed: %s" % str( e ) ) try: GalaxySession_table = Table( "galaxy_session", metadata, autoload=True ) GalaxySession_table.c.disk_usage.drop() except Exception as e: - print "Dropping disk_usage column from galaxy_session table failed: %s" % str( e ) + print("Dropping disk_usage column from galaxy_session table failed: %s" % str( e )) log.debug( "Dropping disk_usage column from galaxy_session table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py b/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py index 4cc0d6e0ab6f..6f84cde2b3a5 100644 --- a/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py +++ b/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py @@ -18,14 +18,14 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create the job_to_input_library_dataset table try: JobToInputLibraryDatasetAssociation_table.create() except Exception as e: - print "Creating job_to_input_library_dataset table failed: %s" % str( e ) + print("Creating job_to_input_library_dataset table failed: %s" % str( e )) log.debug( "Creating job_to_input_library_dataset table failed: %s" % str( e ) ) @@ -37,5 +37,5 @@ def downgrade(migrate_engine): try: JobToInputLibraryDatasetAssociation_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping job_to_input_library_dataset table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0080_quota_tables.py b/lib/galaxy/model/migrate/versions/0080_quota_tables.py index 5420a822f2db..77ca768b10a0 100644 --- a/lib/galaxy/model/migrate/versions/0080_quota_tables.py +++ b/lib/galaxy/model/migrate/versions/0080_quota_tables.py @@ -46,7 +46,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create quota table diff --git a/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py b/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py index d3781b9dc236..34b3949198b4 100644 --- a/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py +++ b/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py @@ -8,7 +8,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: hda_table = Table( "history_dataset_association", metadata, autoload=True ) @@ -22,7 +22,7 @@ def upgrade(migrate_engine): assert c is ldda_table.c.tool_version except Exception as e: - print "Adding the tool_version column to the hda/ldda tables failed: ", str( e ) + print("Adding the tool_version column to the hda/ldda tables failed: ", str( e )) def downgrade(migrate_engine): @@ -35,4 +35,4 @@ def downgrade(migrate_engine): ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True ) ldda_table.c.tool_version.drop() except Exception as e: - print "Dropping the tool_version column from hda/ldda 
table failed: ", str( e ) + print("Dropping the tool_version column from hda/ldda table failed: ", str( e )) diff --git a/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py b/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py index 436576ebffae..b7cfe370276b 100644 --- a/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py +++ b/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py @@ -36,7 +36,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: ToolShedRepository_table.create() diff --git a/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py b/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py index 2b9570818dd6..e65d06b8a349 100644 --- a/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py +++ b/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: task_table = Table( "task", metadata, autoload=True ) @@ -19,7 +19,7 @@ def upgrade(migrate_engine): c.create( task_table ) assert c is task_table.c.prepare_input_files_cmd except Exception as e: - print "Adding prepare_input_files_cmd column to task table failed: %s" % str( e ) + print("Adding prepare_input_files_cmd column to task table failed: %s" % str( e )) log.debug( "Adding prepare_input_files_cmd column to task table failed: %s" % str( e ) ) try: task_table = Table( "task", metadata, autoload=True ) @@ -27,7 +27,7 @@ def upgrade(migrate_engine): c.create( task_table ) assert c is task_table.c.working_directory except Exception as e: - print "Adding working_directory column to task table failed: %s" % str( e ) + print("Adding working_directory column to task table failed: %s" % str( e )) log.debug( "Adding working_directory column to task table failed: %s" % str( e ) ) # remove the 'part_file' column - nobody used tasks before this, so no data needs to be migrated @@ -44,13 +44,13 @@ def downgrade(migrate_engine): task_table = Table( "task", metadata, autoload=True ) task_table.c.prepare_input_files_cmd.drop() except Exception as e: - print "Dropping prepare_input_files_cmd column from task table failed: %s" % str( e ) + print("Dropping prepare_input_files_cmd column from task table failed: %s" % str( e )) log.debug( "Dropping prepare_input_files_cmd column from task table failed: %s" % str( e ) ) try: task_table = Table( "task", metadata, autoload=True ) task_table.c.working_directory.drop() except Exception as e: - print "Dropping working_directory column from task table failed: %s" % str( e ) + print("Dropping working_directory column from task table failed: %s" % str( e )) log.debug( "Dropping working_directory column from task table failed: %s" % str( e ) ) try: task_table = Table( "task", metadata, autoload=True ) @@ -58,5 +58,5 @@ def downgrade(migrate_engine): c.create( task_table ) assert c is task_table.c.part_file except Exception as e: - print "Adding part_file column to task table failed: %s" % str( e ) + print("Adding part_file column to task table failed: %s" % str( e )) log.debug( "Adding part_file column to task table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py b/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py index 27ed7d01bbce..5dde0a3a1dc1 100644 --- 
a/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py +++ b/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py @@ -11,7 +11,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True ) @@ -22,7 +22,7 @@ def upgrade(migrate_engine): c.create( Implicitly_converted_table, index_name="ix_implicitly_converted_ds_assoc_ldda_id") assert c is Implicitly_converted_table.c.ldda_id except Exception as e: - print "Adding ldda_id column to implicitly_converted_dataset_association table failed: %s" % str( e ) + print("Adding ldda_id column to implicitly_converted_dataset_association table failed: %s" % str( e )) log.debug( "Adding ldda_id column to implicitly_converted_dataset_association table failed: %s" % str( e ) ) @@ -33,5 +33,5 @@ def downgrade(migrate_engine): Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True ) Implicitly_converted_table.c.ldda_id.drop() except Exception as e: - print "Dropping ldda_id column from implicitly_converted_dataset_association table failed: %s" % str( e ) + print("Dropping ldda_id column from implicitly_converted_dataset_association table failed: %s" % str( e )) log.debug( "Dropping ldda_id column from implicitly_converted_dataset_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0085_add_task_info.py b/lib/galaxy/model/migrate/versions/0085_add_task_info.py index ffdce7b5c322..327bb8303326 100644 --- a/lib/galaxy/model/migrate/versions/0085_add_task_info.py +++ b/lib/galaxy/model/migrate/versions/0085_add_task_info.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: task_table = Table( "task", metadata, autoload=True ) @@ -21,7 +21,7 @@ def upgrade(migrate_engine): c.create( task_table ) assert c is task_table.c.info except Exception as e: - print "Adding info column to table table failed: %s" % str( e ) + print("Adding info column to table table failed: %s" % str( e )) log.debug( "Adding info column to task table failed: %s" % str( e ) ) @@ -32,5 +32,5 @@ def downgrade(migrate_engine): task_table = Table( "task", metadata, autoload=True ) task_table.c.info.drop() except Exception as e: - print "Dropping info column from task table failed: %s" % str( e ) + print("Dropping info column from task table failed: %s" % str( e )) log.debug( "Dropping info column from task table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py index 7c48f8400b49..81dacdb3dd9d 100644 --- a/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py +++ b/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py @@ -31,7 +31,7 @@ def get_default_false(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) c = Column( "metadata", JSONType(), nullable=True ) @@ -39,7 +39,7 @@ def upgrade(migrate_engine): c.create( ToolShedRepository_table ) assert c is ToolShedRepository_table.c.metadata except Exception as e: - print "Adding metadata column to 
the tool_shed_repository table failed: %s" % str( e ) + print("Adding metadata column to the tool_shed_repository table failed: %s" % str( e )) log.debug( "Adding metadata column to the tool_shed_repository table failed: %s" % str( e ) ) c = Column( "includes_datatypes", Boolean, index=True, default=False ) try: @@ -47,7 +47,7 @@ def upgrade(migrate_engine): assert c is ToolShedRepository_table.c.includes_datatypes migrate_engine.execute( "UPDATE tool_shed_repository SET includes_datatypes=%s" % get_default_false(migrate_engine)) except Exception as e: - print "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e )) log.debug( "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ) ) c = Column( "update_available", Boolean, default=False ) try: @@ -55,7 +55,7 @@ def upgrade(migrate_engine): assert c is ToolShedRepository_table.c.update_available migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % get_default_false(migrate_engine)) except Exception as e: - print "Adding update_available column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding update_available column to the tool_shed_repository table failed: %s" % str( e )) log.debug( "Adding update_available column to the tool_shed_repository table failed: %s" % str( e ) ) @@ -66,15 +66,15 @@ def downgrade(migrate_engine): try: ToolShedRepository_table.c.metadata.drop() except Exception as e: - print "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column metadata from the tool_shed_repository table failed: %s" % str( e )) log.debug( "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ) ) try: ToolShedRepository_table.c.includes_datatypes.drop() except Exception as e: - print "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e )) log.debug( "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ) ) try: ToolShedRepository_table.c.update_available.drop() except Exception as e: - print "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column update_available from the tool_shed_repository table failed: %s" % str( e )) log.debug( "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py index b7bdc756799b..b51035bf42e7 100644 --- a/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py +++ b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py @@ -35,7 +35,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: ToolIdGuidMap_table.create() diff --git a/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py index e12d721050b3..38ad15192872 100644 --- a/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py +++ b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py @@ -24,7 +24,7 @@ 
def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) col = Column( "installed_changeset_revision", TrimmedString( 255 ) ) @@ -32,7 +32,7 @@ def upgrade(migrate_engine): col.create( ToolShedRepository_table ) assert col is ToolShedRepository_table.c.installed_changeset_revision except Exception as e: - print "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e )) log.debug( "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) ) # Update each row by setting the value of installed_changeset_revison to be the value of changeset_revision. # This will be problematic if the value of changeset_revision was updated to something other than the value @@ -51,7 +51,7 @@ def upgrade(migrate_engine): + "WHERE changeset_revision = '%s';" % row.changeset_revision migrate_engine.execute( cmd ) update_count += 1 - print "Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table. " + print("Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table. ") def downgrade(migrate_engine): @@ -61,5 +61,5 @@ def downgrade(migrate_engine): try: ToolShedRepository_table.c.installed_changeset_revision.drop() except Exception as e: - print "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e )) log.debug( "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py b/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py index efabbd5ab267..acf4469413a4 100644 --- a/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py +++ b/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for t_name in ( 'dataset', 'job', 'metadata_file' ): t = Table( t_name, metadata, autoload=True ) @@ -22,7 +22,7 @@ def upgrade(migrate_engine): c.create( t, index_name="ix_%s_object_store_id" % t_name) assert c is t.c.object_store_id except Exception as e: - print "Adding object_store_id column to %s table failed: %s" % ( t_name, str( e ) ) + print("Adding object_store_id column to %s table failed: %s" % ( t_name, str( e ) )) log.debug( "Adding object_store_id column to %s table failed: %s" % ( t_name, str( e ) ) ) @@ -34,5 +34,5 @@ def downgrade(migrate_engine): try: t.c.object_store_id.drop() except Exception as e: - print "Dropping object_store_id column from %s table failed: %s" % ( t_name, str( e ) ) + print("Dropping object_store_id column from %s table failed: %s" % ( t_name, str( e ) )) log.debug( "Dropping object_store_id column from %s table failed: %s" % ( t_name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py index eb209758ba5a..8b35c9aab191 100644 --- 
a/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py +++ b/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py @@ -28,7 +28,7 @@ def default_false(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) c = Column( "uninstalled", Boolean, default=False ) @@ -37,14 +37,14 @@ def upgrade(migrate_engine): assert c is ToolShedRepository_table.c.uninstalled migrate_engine.execute( "UPDATE tool_shed_repository SET uninstalled=%s" % default_false(migrate_engine) ) except Exception as e: - print "Adding uninstalled column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding uninstalled column to the tool_shed_repository table failed: %s" % str( e )) c = Column( "dist_to_shed", Boolean, default=False ) try: c.create( ToolShedRepository_table ) assert c is ToolShedRepository_table.c.dist_to_shed migrate_engine.execute( "UPDATE tool_shed_repository SET dist_to_shed=%s" % default_false(migrate_engine) ) except Exception as e: - print "Adding dist_to_shed column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding dist_to_shed column to the tool_shed_repository table failed: %s" % str( e )) def downgrade(migrate_engine): @@ -54,8 +54,8 @@ def downgrade(migrate_engine): try: ToolShedRepository_table.c.uninstalled.drop() except Exception as e: - print "Dropping column uninstalled from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column uninstalled from the tool_shed_repository table failed: %s" % str( e )) try: ToolShedRepository_table.c.dist_to_shed.drop() except Exception as e: - print "Dropping column dist_to_shed from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column dist_to_shed from the tool_shed_repository table failed: %s" % str( e )) diff --git a/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py index cf2967315c0a..f9ae343ac365 100644 --- a/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py +++ b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py @@ -55,7 +55,7 @@ def localtimestamp(migrate_engine): def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True ) @@ -85,7 +85,7 @@ def upgrade(migrate_engine): ( nextval( migrate_engine, 'tool_version' ), localtimestamp( migrate_engine ), localtimestamp( migrate_engine ), tool_dict[ 'guid' ], tool_shed_repository_id ) migrate_engine.execute( cmd ) count += 1 - print "Added %d rows to the new tool_version table." % count + print("Added %d rows to the new tool_version table." % count) # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary. try: ToolIdGuidMap_table.drop() diff --git a/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py index af5b8afa1f23..e9d55d264bc3 100644 --- a/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py +++ b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py @@ -29,7 +29,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create the table. 
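Every migration script converted above and below follows the same skeleton: echo the module docstring, bind and reflect the metadata, then wrap each DDL call in a try/except that both prints and logs the failure. A minimal self-contained sketch of that skeleton, assuming sqlalchemy-migrate's changeset extensions and a hypothetical 'example' column that is not part of this patch:

    """
    Migration script to add a hypothetical 'example' column to the job table.
    """
    from __future__ import print_function

    import logging

    from sqlalchemy import TEXT, Column, MetaData, Table

    import migrate.changeset  # noqa: F401 - importing it monkey-patches Column.create()/.drop()

    log = logging.getLogger(__name__)
    metadata = MetaData()


    def upgrade(migrate_engine):
        metadata.bind = migrate_engine
        print(__doc__)      # each script in this series echoes its docstring on upgrade
        metadata.reflect()  # load the existing schema
        try:
            job_table = Table("job", metadata, autoload=True)
            col = Column("example", TEXT)
            col.create(job_table)
            assert col is job_table.c.example
        except Exception as e:
            print("Adding example column to job table failed: %s" % str(e))
            log.debug("Adding example column to job table failed: %s" % str(e))


    def downgrade(migrate_engine):
        metadata.bind = migrate_engine
        metadata.reflect()
        try:
            job_table = Table("job", metadata, autoload=True)
            job_table.c.example.drop()
        except Exception as e:
            print("Dropping example column from job table failed: %s" % str(e))
            log.debug("Dropping example column from job table failed: %s" % str(e))

Printing and logging the same message is redundant but kept deliberately: the conversion touches only the syntax, so the scripts behave identically while becoming valid Python 3.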
diff --git a/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py index c8e6d607ced3..12d1fa876ef2 100644 --- a/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py +++ b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py @@ -16,13 +16,13 @@ def display_migration_details(): - print "" - print "This migration script adds a 'params' column to the Job table." + print("") + print("This migration script adds a 'params' column to the Job table.") def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Add column to Job table. @@ -32,7 +32,7 @@ def upgrade(migrate_engine): assert params_col is Job_table.c.params except Exception as e: - print str(e) + print(str(e)) log.debug( "Adding column 'params' to job table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py index 0d996fe6e58c..46ddd8e5b8ec 100644 --- a/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py +++ b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py @@ -16,13 +16,13 @@ def display_migration_details(): - print "" - print "This migration script adds a 'handler' column to the Job table." + print("") + print("This migration script adds a 'handler' column to the Job table.") def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Add column to Job table. @@ -32,7 +32,7 @@ def upgrade(migrate_engine): assert handler_col is Job_table.c.handler except Exception as e: - print str(e) + print(str(e)) log.debug( "Adding column 'handler' to job table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0095_hda_subsets.py b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py index 3ca255eb23cb..d1c15c9052c6 100644 --- a/lib/galaxy/model/migrate/versions/0095_hda_subsets.py +++ b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py @@ -19,14 +19,14 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() # Create history_dataset_association_subset. try: HistoryDatasetAssociationSubset_table.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Creating history_dataset_association_subset table failed: %s" % str( e ) ) # Manually create indexes because they are too long for MySQL databases. 
@@ -36,7 +36,7 @@ def upgrade(migrate_engine): i1.create() i2.create() except Exception as e: - print str(e) + print(str(e)) log.debug( "Adding indices to table 'history_dataset_association_subset' table failed: %s" % str( e ) ) @@ -48,5 +48,5 @@ def downgrade(migrate_engine): try: HistoryDatasetAssociationSubset_table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug( "Dropping history_dataset_association_subset table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0096_openid_provider.py b/lib/galaxy/model/migrate/versions/0096_openid_provider.py index 89792e5cd468..99481801b744 100644 --- a/lib/galaxy/model/migrate/versions/0096_openid_provider.py +++ b/lib/galaxy/model/migrate/versions/0096_openid_provider.py @@ -15,7 +15,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: @@ -24,7 +24,7 @@ def upgrade(migrate_engine): c.create( OpenID_table ) assert c is OpenID_table.c.provider except Exception as e: - print "Adding provider column to galaxy_user_openid table failed: %s" % str( e ) + print("Adding provider column to galaxy_user_openid table failed: %s" % str( e )) log.debug( "Adding provider column to galaxy_user_openid table failed: %s" % str( e ) ) try: @@ -41,5 +41,5 @@ def downgrade(migrate_engine): OpenID_table = Table( "galaxy_user_openid", metadata, autoload=True ) OpenID_table.c.provider.drop() except Exception as e: - print "Dropping provider column from galaxy_user_openid table failed: %s" % str( e ) + print("Dropping provider column from galaxy_user_openid table failed: %s" % str( e )) log.debug( "Dropping provider column from galaxy_user_openid table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py index 10076ae2ed84..5f294e37ed2b 100644 --- a/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py +++ b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py @@ -24,7 +24,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) col = Column( "ctx_rev", TrimmedString( 10 ) ) @@ -32,7 +32,7 @@ def upgrade(migrate_engine): col.create( ToolShedRepository_table ) assert col is ToolShedRepository_table.c.ctx_rev except Exception as e: - print "Adding ctx_rev column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding ctx_rev column to the tool_shed_repository table failed: %s" % str( e )) def downgrade(migrate_engine): @@ -42,4 +42,4 @@ def downgrade(migrate_engine): try: ToolShedRepository_table.c.ctx_rev.drop() except Exception as e: - print "Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e )) diff --git a/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py b/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py index dd86fd290a62..eff33d3f9fce 100644 --- a/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py +++ b/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py @@ -34,7 +34,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: diff --git a/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py 
b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py index f04f19282f3b..3994679e7d30 100644 --- a/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py +++ b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py @@ -35,7 +35,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: ToolDependency_table.create() diff --git a/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py index eefe3b0330e9..edeec5578938 100644 --- a/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py +++ b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py @@ -20,7 +20,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() Table( "tool_dependency", metadata, autoload=True ) # Change the tool_dependency table's version column from TrimmedString to Text. diff --git a/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py index a5aaf627fed4..15f646d8c310 100644 --- a/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py +++ b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py @@ -22,7 +22,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: ToolDependency_table = Table( "tool_dependency", metadata, autoload=True ) diff --git a/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py index 45887cb05845..889d86351576 100644 --- a/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py +++ b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py @@ -24,7 +24,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() ToolDependency_table = Table( "tool_dependency", metadata, autoload=True ) if migrate_engine.name == 'sqlite': @@ -35,13 +35,13 @@ def upgrade(migrate_engine): col.create( ToolDependency_table ) assert col is ToolDependency_table.c.status except Exception as e: - print "Adding status column to the tool_dependency table failed: %s" % str( e ) + print("Adding status column to the tool_dependency table failed: %s" % str( e )) col = Column( "error_message", TEXT ) try: col.create( ToolDependency_table ) assert col is ToolDependency_table.c.error_message except Exception as e: - print "Adding error_message column to the tool_dependency table failed: %s" % str( e ) + print("Adding error_message column to the tool_dependency table failed: %s" % str( e )) if migrate_engine.name != 'sqlite': # This breaks in sqlite due to failure to drop check constraint. 
@@ -49,7 +49,7 @@ def upgrade(migrate_engine): try: ToolDependency_table.c.uninstalled.drop() except Exception as e: - print "Dropping uninstalled column from the tool_dependency table failed: %s" % str( e ) + print("Dropping uninstalled column from the tool_dependency table failed: %s" % str( e )) def downgrade(migrate_engine): @@ -59,14 +59,14 @@ def downgrade(migrate_engine): try: ToolDependency_table.c.status.drop() except Exception as e: - print "Dropping column status from the tool_dependency table failed: %s" % str( e ) + print("Dropping column status from the tool_dependency table failed: %s" % str( e )) try: ToolDependency_table.c.error_message.drop() except Exception as e: - print "Dropping column error_message from the tool_dependency table failed: %s" % str( e ) + print("Dropping column error_message from the tool_dependency table failed: %s" % str( e )) col = Column( "uninstalled", Boolean, default=False ) try: col.create( ToolDependency_table ) assert col is ToolDependency_table.c.uninstalled except Exception as e: - print "Adding uninstalled column to the tool_dependency table failed: %s" % str( e ) + print("Adding uninstalled column to the tool_dependency table failed: %s" % str( e )) diff --git a/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py b/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py index a9a2e963c90c..78ccd57b9873 100644 --- a/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py +++ b/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py @@ -12,7 +12,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) # Add the status column to the tool_shed_repository table. @@ -21,38 +21,38 @@ def upgrade(migrate_engine): col.create( ToolShedRepository_table ) assert col is ToolShedRepository_table.c.status except Exception as e: - print "Adding status column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding status column to the tool_shed_repository table failed: %s" % str( e )) # Add the error_message column to the tool_shed_repository table. col = Column( "error_message", TEXT ) try: col.create( ToolShedRepository_table ) assert col is ToolShedRepository_table.c.error_message except Exception as e: - print "Adding error_message column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding error_message column to the tool_shed_repository table failed: %s" % str( e )) # Update the status column value for tool_shed_repositories to the default value 'Installed'. cmd = "UPDATE tool_shed_repository SET status = 'Installed';" try: migrate_engine.execute( cmd ) except Exception as e: - print "Exception executing sql command: " - print cmd - print str( e ) + print("Exception executing sql command: ") + print(cmd) + print(str( e )) # Update the status column for tool_shed_repositories that have been uninstalled. cmd = "UPDATE tool_shed_repository SET status = 'Uninstalled' WHERE uninstalled;" try: migrate_engine.execute( cmd ) except Exception as e: - print "Exception executing sql command: " - print cmd - print str( e ) + print("Exception executing sql command: ") + print(cmd) + print(str( e )) # Update the status column for tool_shed_repositories that have been deactivated. 
cmd = "UPDATE tool_shed_repository SET status = 'Deactivated' where deleted and not uninstalled;" try: migrate_engine.execute( cmd ) except Exception as e: - print "Exception executing sql command: " - print cmd - print str( e ) + print("Exception executing sql command: ") + print(cmd) + print(str( e )) def downgrade(migrate_engine): @@ -62,8 +62,8 @@ def downgrade(migrate_engine): try: ToolShedRepository_table.c.status.drop() except Exception as e: - print "Dropping column status from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column status from the tool_shed_repository table failed: %s" % str( e )) try: ToolShedRepository_table.c.error_message.drop() except Exception as e: - print "Dropping column error_message from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column error_message from the tool_shed_repository table failed: %s" % str( e )) diff --git a/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py b/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py index adfcd25c9719..2782ea9748d2 100644 --- a/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py +++ b/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py @@ -83,7 +83,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: CleanupEvent_table.create() diff --git a/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py b/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py index b237c8929b6d..bd4ad57275d5 100644 --- a/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py +++ b/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py @@ -49,7 +49,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() insp = reflection.Inspector.from_engine(migrate_engine) diff --git a/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py b/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py index 305c2031b5ff..84f81792c216 100644 --- a/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py +++ b/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py @@ -15,12 +15,12 @@ def display_migration_details(): - print "" - print "This migration script adds a 'handler' column to the Job table." 
+ print("") + print("This migration script adds a 'handler' column to the Job table.") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() @@ -30,7 +30,7 @@ def upgrade(migrate_engine): exit_code_job_col.create( job_table ) assert exit_code_job_col is job_table.c.exit_code except Exception as e: - print str(e) + print(str(e)) log.error( "Adding column 'exit_code' to job table failed: %s" % str( e ) ) return @@ -40,7 +40,7 @@ def upgrade(migrate_engine): exit_code_task_col.create( task_table ) assert exit_code_task_col is task_table.c.exit_code except Exception as e: - print str(e) + print(str(e)) log.error( "Adding column 'exit_code' to task table failed: %s" % str( e ) ) return diff --git a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py index 958bfefb859c..c083c8d08db3 100644 --- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py +++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py @@ -27,11 +27,11 @@ def display_migration_details(): - print "This migration script adds a ExtendedMetadata tables" + print("This migration script adds a ExtendedMetadata tables") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() try: @@ -48,7 +48,7 @@ def upgrade(migrate_engine): extended_metadata_ldda_col.create( ldda_table ) assert extended_metadata_ldda_col is ldda_table.c.extended_metadata_id except Exception as e: - print str(e) + print(str(e)) log.error( "Adding column 'extended_metadata_id' to library_dataset_dataset_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py index 49016323c05a..ded3ed515960 100644 --- a/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py +++ b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py @@ -33,7 +33,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() try: diff --git a/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py b/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py index 2b32fec29ed9..cdf1d4e70c40 100644 --- a/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py +++ b/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py @@ -12,12 +12,12 @@ def display_migration_details(): - print "" - print "This migration adds uuid column to dataset table" + print("") + print("This migration adds uuid column to dataset table") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata = MetaData() metadata.bind = migrate_engine metadata.reflect() @@ -28,7 +28,7 @@ def upgrade(migrate_engine): dataset_uuid_column.create( dataset_table ) assert dataset_uuid_column is dataset_table.c.uuid except Exception as e: - print str(e) + print(str(e)) log.error( "Adding column 'uuid' to dataset table failed: %s" % str( e ) ) return diff --git a/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py b/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py index 4eb8b6b89a55..bfb1dc4e6d14 100644 --- a/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py +++ b/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py @@ -11,12 +11,12 @@ def display_migration_details(): - print "" - print "This migration script adds 'destination_id' 
and 'destination_params' columns to the Job table." + print("") + print("This migration script adds 'destination_id' and 'destination_params' columns to the Job table.") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata = MetaData() metadata.bind = migrate_engine metadata.reflect() diff --git a/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py b/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py index 0e63ffeba1e4..d62f2722ee2e 100644 --- a/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py +++ b/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py @@ -27,7 +27,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() try: diff --git a/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py index c6a1c1ed03d4..07d54bb9ae4e 100644 --- a/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py +++ b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py @@ -16,7 +16,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) # Create the table. try: cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';" diff --git a/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py b/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py index f54fd9934d28..4408533a57bb 100644 --- a/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py +++ b/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py @@ -16,7 +16,7 @@ def upgrade(migrate_engine): - print __doc__ + print(__doc__) # Create the table. 
try: cmd = "UPDATE migrate_tools set repository_path='lib/tool_shed/galaxy_install/migrate';" diff --git a/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py b/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py index 830f8cd12bf6..a5040e9b5322 100644 --- a/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py +++ b/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py @@ -31,7 +31,7 @@ def default_false( migrate_engine ): def upgrade( migrate_engine ): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) @@ -45,13 +45,13 @@ def upgrade( migrate_engine ): col = ToolShedRepository_table.c.update_available col.drop() except Exception as e: - print "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column update_available from the tool_shed_repository table failed: %s" % str( e )) c = Column( "tool_shed_status", JSONType, nullable=True ) try: c.create( ToolShedRepository_table ) assert c is ToolShedRepository_table.c.tool_shed_status except Exception as e: - print "Adding tool_shed_status column to the tool_shed_repository table failed: %s" % str( e ) + print("Adding tool_shed_status column to the tool_shed_repository table failed: %s" % str( e )) def downgrade( migrate_engine ): @@ -69,11 +69,11 @@ def downgrade( migrate_engine ): col = ToolShedRepository_table.c.tool_shed_status col.drop() except Exception as e: - print "Dropping column tool_shed_status from the tool_shed_repository table failed: %s" % str( e ) + print("Dropping column tool_shed_status from the tool_shed_repository table failed: %s" % str( e )) c = Column( "update_available", Boolean, default=False ) try: c.create( ToolShedRepository_table ) assert c is ToolShedRepository_table.c.update_available migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % default_false( migrate_engine ) ) except Exception as e: - print "Adding column update_available to the tool_shed_repository table failed: %s" % str( e ) + print("Adding column update_available to the tool_shed_repository table failed: %s" % str( e )) diff --git a/lib/galaxy/model/migrate/versions/0117_add_user_activation.py b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py index 28c618b3e608..b01dc907464c 100644 --- a/lib/galaxy/model/migrate/versions/0117_add_user_activation.py +++ b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py @@ -17,12 +17,12 @@ def display_migration_details(): - print "" - print "This migration script adds active and activation_token columns to the user table" + print("") + print("This migration script adds active and activation_token columns to the user table") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata = MetaData() metadata.bind = migrate_engine metadata.reflect() @@ -35,7 +35,7 @@ def upgrade(migrate_engine): assert user_active_column is user_table.c.active assert user_activation_token_column is user_table.c.activation_token except Exception as e: - print str(e) + print(str(e)) log.error( "Adding columns 'active' and 'activation_token' to galaxy_user table failed: %s" % str( e ) ) return diff --git a/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py b/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py 
index f846ec5e44e7..a9a9f4c0e712 100644 --- a/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py +++ b/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py @@ -11,11 +11,11 @@ def display_migration_details(): - print "This migration script adds a ExtendedMetadata links to HistoryDatasetAssociation tables" + print("This migration script adds ExtendedMetadata links to the HistoryDatasetAssociation table") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() @@ -24,7 +24,7 @@ def upgrade(migrate_engine): extended_metadata_hda_col.create( hda_table ) assert extended_metadata_hda_col is hda_table.c.extended_metadata_id except Exception as e: - print str(e) + print(str(e)) log.error( "Adding column 'extended_metadata_id' to history_dataset_association table failed: %s" % str( e ) ) diff --git a/lib/galaxy/model/migrate/versions/0119_job_metrics.py b/lib/galaxy/model/migrate/versions/0119_job_metrics.py index 94607d1e0ba3..4a8136243dfd 100644 --- a/lib/galaxy/model/migrate/versions/0119_job_metrics.py +++ b/lib/galaxy/model/migrate/versions/0119_job_metrics.py @@ -66,7 +66,7 @@ def upgrade( migrate_engine ): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in TABLES: @@ -85,7 +85,7 @@ def __create(table): try: table.create() except Exception as e: - print str(e) + print(str(e)) log.debug("Creating %s table failed: %s" % (table.name, str( e ) ) ) @@ -93,5 +93,5 @@ def __drop(table): try: table.drop() except Exception as e: - print str(e) + print(str(e)) log.debug("Dropping %s table failed: %s" % (table.name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0120_dataset_collections.py b/lib/galaxy/model/migrate/versions/0120_dataset_collections.py index b6c0b533a0ff..eec3d3bfb3ab 100644 --- a/lib/galaxy/model/migrate/versions/0120_dataset_collections.py +++ b/lib/galaxy/model/migrate/versions/0120_dataset_collections.py @@ -128,7 +128,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in TABLES: @@ -138,7 +138,7 @@ def upgrade(migrate_engine): hda_table = Table( "history_dataset_association", metadata, autoload=True ) HiddenBeneathCollection_column.create( hda_table ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Creating HDA column failed." ) @@ -154,7 +154,7 @@ def downgrade(migrate_engine): hidden_beneath_collection_instance_id_col = hda_table.c.hidden_beneath_collection_instance_id hidden_beneath_collection_instance_id_col.drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping HDA column failed." 
) @@ -162,7 +162,7 @@ def __create(table): try: table.create() except Exception as e: - print str(e) + print(str(e)) log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) ) @@ -170,5 +170,5 @@ def __drop(table): try: table.drop() except Exception as e: - print str(e) + print(str(e)) log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py b/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py index 6075b34bf3f4..247ca660a498 100644 --- a/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py +++ b/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py @@ -20,11 +20,11 @@ def display_migration_details(): - print "This migration script adds a UUID column to workflows" + print("This migration script adds a UUID column to workflows") def upgrade(migrate_engine): - print __doc__ + print(__doc__) metadata.bind = migrate_engine metadata.reflect() @@ -34,7 +34,7 @@ def upgrade(migrate_engine): workflow_uuid_column.create( workflow_table ) assert workflow_uuid_column is workflow_table.c.uuid except Exception as e: - print str(e) + print(str(e)) log.error( "Adding column 'uuid' to workflow table failed: %s" % str( e ) ) return diff --git a/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py b/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py index 41b03dfe9dc8..3005fb11e9dd 100644 --- a/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py +++ b/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py @@ -36,7 +36,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() if migrate_engine.name != "mysql": @@ -47,8 +47,8 @@ def upgrade(migrate_engine): try: migrate_engine.execute( cmd ) except Exception as e: - print "Failed to grow column %s.%s" % (table, column) - print str( e ) + print("Failed to grow column %s.%s" % (table, column)) + print(str( e )) def downgrade(migrate_engine): diff --git a/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py b/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py index 1236096dcb2b..8693e994c103 100644 --- a/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py +++ b/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py @@ -62,7 +62,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in TABLES: @@ -112,7 +112,7 @@ def __add_column(column, table_name, metadata, **kwds): table = Table( table_name, metadata, autoload=True ) column.create( table, **kwds ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Adding column %s column failed." % column) @@ -121,7 +121,7 @@ def __drop_column( column_name, table_name, metadata ): table = Table( table_name, metadata, autoload=True ) getattr( table.c, column_name ).drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping column %s failed." 
% column_name ) @@ -129,7 +129,7 @@ def __create(table): try: table.create() except Exception as e: - print str(e) + print(str(e)) log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) ) @@ -137,5 +137,5 @@ def __drop(table): try: table.drop() except Exception as e: - print str(e) + print(str(e)) log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0124_job_state_history.py b/lib/galaxy/model/migrate/versions/0124_job_state_history.py index 35189f61f59f..d4a25816874c 100644 --- a/lib/galaxy/model/migrate/versions/0124_job_state_history.py +++ b/lib/galaxy/model/migrate/versions/0124_job_state_history.py @@ -23,13 +23,13 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: JobStateHistory_table.create() except Exception as e: - print str(e) + print(str(e)) log.exception("Creating %s table failed: %s" % (JobStateHistory_table.name, str( e ) ) ) @@ -40,5 +40,5 @@ def downgrade(migrate_engine): try: JobStateHistory_table.drop() except Exception as e: - print str(e) + print(str(e)) log.exception("Dropping %s table failed: %s" % (JobStateHistory_table.name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py b/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py index 2aebdc78c09a..a9086804c7e7 100644 --- a/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py +++ b/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py @@ -15,7 +15,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() StepLabel_column = Column( "label", TrimmedString(255) ) @@ -37,7 +37,7 @@ def __add_column(column, table_name, metadata, **kwds): table = Table( table_name, metadata, autoload=True ) column.create( table, **kwds ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Adding column %s failed." % column) @@ -46,5 +46,5 @@ def __drop_column( column_name, table_name, metadata ): table = Table( table_name, metadata, autoload=True ) getattr( table.c, column_name ).drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping column %s failed." 
% column_name ) diff --git a/lib/galaxy/model/migrate/versions/0126_password_reset.py b/lib/galaxy/model/migrate/versions/0126_password_reset.py index 63cacb803a39..66dbdf5b6fc6 100644 --- a/lib/galaxy/model/migrate/versions/0126_password_reset.py +++ b/lib/galaxy/model/migrate/versions/0126_password_reset.py @@ -18,12 +18,12 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() try: PasswordResetToken_table.create() except Exception as e: - print str(e) + print(str(e)) log.exception("Creating %s table failed: %s" % (PasswordResetToken_table.name, str( e ) ) ) @@ -33,5 +33,5 @@ def downgrade(migrate_engine): try: PasswordResetToken_table.drop() except Exception as e: - print str(e) + print(str(e)) log.exception("Dropping %s table failed: %s" % (PasswordResetToken_table.name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py b/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py index 65355aba28dc..c1cca338f24c 100644 --- a/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py +++ b/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py @@ -28,7 +28,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() for table in TABLES: @@ -43,7 +43,7 @@ def upgrade(migrate_engine): populated_message_column = Column( 'populated_state_message', TEXT, nullable=True ) populated_message_column.create( dataset_collection_table ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Creating dataset collection populated column failed." ) @@ -61,7 +61,7 @@ def downgrade(migrate_engine): populated_message_column = dataset_collection_table.c.populated_state_message populated_message_column.drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping dataset collection populated_state/ column failed." ) @@ -69,7 +69,7 @@ def __create(table): try: table.create() except Exception as e: - print str(e) + print(str(e)) log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) ) @@ -77,5 +77,5 @@ def __drop(table): try: table.drop() except Exception as e: - print str(e) + print(str(e)) log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) ) diff --git a/lib/galaxy/model/migrate/versions/0128_session_timeout.py b/lib/galaxy/model/migrate/versions/0128_session_timeout.py index fc63674a88b7..7914047ac917 100644 --- a/lib/galaxy/model/migrate/versions/0128_session_timeout.py +++ b/lib/galaxy/model/migrate/versions/0128_session_timeout.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() lastaction_column = Column( "last_action", DateTime ) @@ -32,7 +32,7 @@ def __add_column(column, table_name, metadata, **kwds): table = Table( table_name, metadata, autoload=True ) column.create( table, **kwds ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Adding column %s failed." % column) @@ -41,5 +41,5 @@ def __drop_column( column_name, table_name, metadata ): table = Table( table_name, metadata, autoload=True ) getattr( table.c, column_name ).drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping column %s failed." 
% column_name ) diff --git a/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py b/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py index b158ee822242..fe5dca741f2e 100644 --- a/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py +++ b/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py @@ -13,7 +13,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() isvalid_column = Column( "is_valid", Boolean, default=True ) @@ -32,7 +32,7 @@ def __add_column(column, table_name, metadata, **kwds): table = Table( table_name, metadata, autoload=True ) column.create( table, **kwds ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Adding column %s failed." % column) @@ -41,5 +41,5 @@ def __drop_column( column_name, table_name, metadata ): table = Table( table_name, metadata, autoload=True ) getattr( table.c, column_name ).drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping column %s failed." % column_name ) diff --git a/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py b/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py index 4c9acf48250b..220268fa5cc2 100644 --- a/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py +++ b/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py @@ -15,7 +15,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() t = Table("user_preference", metadata, autoload=True) t.c.value.alter(type=Text) diff --git a/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py b/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py index b831e7e4583b..ad1e1c485e42 100644 --- a/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py +++ b/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py @@ -47,7 +47,7 @@ def upgrade(migrate_engine): metadata.bind = migrate_engine - print __doc__ + print(__doc__) metadata.reflect() if migrate_engine.name in ['postgres', 'postgresql']: subworkflow_id_column = Column( "subworkflow_id", Integer, ForeignKey("workflow.id"), nullable=True ) @@ -95,7 +95,7 @@ def __alter_column(table_name, column_name, metadata, **kwds): table = Table( table_name, metadata, autoload=True ) getattr( table.c, column_name ).alter(**kwds) except Exception as e: - print str(e) + print(str(e)) log.exception( "Adding column %s failed." % column_name) @@ -104,7 +104,7 @@ def __add_column(column, table_name, metadata, **kwds): table = Table( table_name, metadata, autoload=True ) column.create( table, **kwds ) except Exception as e: - print str(e) + print(str(e)) log.exception( "Adding column %s failed." % column) @@ -113,7 +113,7 @@ def __drop_column( column_name, table_name, metadata ): table = Table( table_name, metadata, autoload=True ) getattr( table.c, column_name ).drop() except Exception as e: - print str(e) + print(str(e)) log.exception( "Dropping column %s failed." 
% column_name ) @@ -121,7 +121,7 @@ def __create(table): try: table.create() except Exception as e: - print str(e) + print(str(e)) log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) ) @@ -129,5 +129,5 @@ def __drop(table): try: table.drop() except Exception as e: - print str(e) + print(str(e)) log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) ) From 6a870a7345353d2ad24b0d9280d87dcd3770325e Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 26 Jun 2016 18:22:53 -0400 Subject: [PATCH 55/86] Simplify .ci/py3_sources.txt --- .ci/py3_sources.txt | 125 +------------------------------------------- 1 file changed, 1 insertion(+), 124 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 46b0bf5ff529..84fc6be84d14 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -32,130 +32,7 @@ lib/galaxy/forms/ lib/galaxy/jobs/ lib/galaxy/managers/ lib/galaxy/model/__init__.py -lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py -lib/galaxy/model/migrate/versions/0007_sharing_histories.py -lib/galaxy/model/migrate/versions/0008_galaxy_forms.py -lib/galaxy/model/migrate/versions/0009_request_table.py -lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py -lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py -lib/galaxy/model/migrate/versions/0012_user_address.py -lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py -lib/galaxy/model/migrate/versions/0014_pages.py -lib/galaxy/model/migrate/versions/0015_tagging.py -lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py -lib/galaxy/model/migrate/versions/0017_library_item_indexes.py -lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py -lib/galaxy/model/migrate/versions/0019_request_library_folder.py -lib/galaxy/model/migrate/versions/0020_library_upload_job.py -lib/galaxy/model/migrate/versions/0021_user_prefs.py -lib/galaxy/model/migrate/versions/0022_visualization_tables.py -lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py -lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py -lib/galaxy/model/migrate/versions/0025_user_info.py -lib/galaxy/model/migrate/versions/0026_cloud_tables.py -lib/galaxy/model/migrate/versions/0027_request_events.py -lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py -lib/galaxy/model/migrate/versions/0029_user_actions.py -lib/galaxy/model/migrate/versions/0030_history_slug_column.py -lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py -lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py -lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py -lib/galaxy/model/migrate/versions/0034_page_user_share_association.py -lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py -lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py -lib/galaxy/model/migrate/versions/0037_samples_library.py -lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py -lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py -lib/galaxy/model/migrate/versions/0040_page_annotations.py -lib/galaxy/model/migrate/versions/0041_workflow_invocation.py -lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py -lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py 
-lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py -lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py -lib/galaxy/model/migrate/versions/0046_post_job_actions.py -lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py -lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py -lib/galaxy/model/migrate/versions/0049_api_keys_table.py -lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py -lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py -lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py -lib/galaxy/model/migrate/versions/0053_item_ratings.py -lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py -lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py -lib/galaxy/model/migrate/versions/0056_workflow_outputs.py -lib/galaxy/model/migrate/versions/0057_request_notify.py -lib/galaxy/model/migrate/versions/0058_history_import_export.py -lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py -lib/galaxy/model/migrate/versions/0060_history_archive_import.py -lib/galaxy/model/migrate/versions/0061_tasks.py -lib/galaxy/model/migrate/versions/0062_user_openid_table.py -lib/galaxy/model/migrate/versions/0063_sequencer_table.py -lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py -lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py -lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py -lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py -lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py -lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py -lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py -lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py -lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py -lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py -lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py -lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py -lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py -lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py -lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py -lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py -lib/galaxy/model/migrate/versions/0080_quota_tables.py -lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py -lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py -lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py -lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py -lib/galaxy/model/migrate/versions/0085_add_task_info.py -lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py -lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py -lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py -lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py -lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py -lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py -lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py -lib/galaxy/model/migrate/versions/0093_add_job_params_col.py 
-lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py -lib/galaxy/model/migrate/versions/0095_hda_subsets.py -lib/galaxy/model/migrate/versions/0096_openid_provider.py -lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py -lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py -lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py -lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py -lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py -lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py -lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py -lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py -lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py -lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py -lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py -lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py -lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py -lib/galaxy/model/migrate/versions/0111_add_job_destinations.py -lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py -lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py -lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py -lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py -lib/galaxy/model/migrate/versions/0117_add_user_activation.py -lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py -lib/galaxy/model/migrate/versions/0119_job_metrics.py -lib/galaxy/model/migrate/versions/0120_dataset_collections.py -lib/galaxy/model/migrate/versions/0121_workflow_uuids.py -lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py -lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py -lib/galaxy/model/migrate/versions/0124_job_state_history.py -lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py -lib/galaxy/model/migrate/versions/0126_password_reset.py -lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py -lib/galaxy/model/migrate/versions/0128_session_timeout.py -lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py -lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py -lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py +lib/galaxy/model/migrate/ lib/galaxy/objectstore/ lib/galaxy/openid/ lib/galaxy/quota/ From 5feefc73b367c09be6775c1372f0872830ce448d Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Mon, 27 Jun 2016 10:52:25 -0400 Subject: [PATCH 56/86] Simplify .ci/py3_sources.txt --- .ci/py3_sources.txt | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 84fc6be84d14..398307358788 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -5,21 +5,7 @@ lib/galaxy/auth/ lib/galaxy/config.py lib/galaxy/dataset_collections/ lib/galaxy/datatypes/binary.py -lib/galaxy/datatypes/converters/bed_to_gff_converter.py -lib/galaxy/datatypes/converters/fastq_to_fqtoc.py -lib/galaxy/datatypes/converters/gff_to_bed_converter.py -lib/galaxy/datatypes/converters/interval_to_bed_converter.py -lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py -lib/galaxy/datatypes/converters/interval_to_coverage.py 
-lib/galaxy/datatypes/converters/lped_to_fped_converter.py -lib/galaxy/datatypes/converters/lped_to_pbed_converter.py -lib/galaxy/datatypes/converters/maf_to_fasta_converter.py -lib/galaxy/datatypes/converters/maf_to_interval_converter.py -lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py -lib/galaxy/datatypes/converters/pbed_to_lped_converter.py -lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py -lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py -lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py +lib/galaxy/datatypes/converters/ lib/galaxy/datatypes/dataproviders/ lib/galaxy/datatypes/sequence.py lib/galaxy/datatypes/sniff.py From 40f85f291327c217d7603f59c5454f38016d1e71 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Mon, 27 Jun 2016 12:41:40 -0400 Subject: [PATCH 57/86] Python3: add missing print_function imports --- lib/galaxy/datatypes/converters/bed_to_gff_converter.py | 2 ++ lib/galaxy/datatypes/converters/fastq_to_fqtoc.py | 2 ++ lib/galaxy/datatypes/converters/gff_to_bed_converter.py | 2 ++ .../datatypes/converters/interval_to_bed_converter.py | 2 ++ .../converters/interval_to_bedstrict_converter.py | 2 ++ lib/galaxy/datatypes/converters/lped_to_fped_converter.py | 4 ++-- lib/galaxy/datatypes/converters/lped_to_pbed_converter.py | 6 ++++-- lib/galaxy/datatypes/converters/maf_to_fasta_converter.py | 2 ++ .../datatypes/converters/maf_to_interval_converter.py | 2 +- .../datatypes/converters/pbed_ldreduced_converter.py | 2 +- lib/galaxy/datatypes/converters/pbed_to_lped_converter.py | 2 ++ .../converters/picard_interval_list_to_bed6_converter.py | 1 + .../datatypes/converters/wiggle_to_simple_converter.py | 3 ++- .../model/migrate/versions/0006_change_qual_datatype.py | 2 ++ .../model/migrate/versions/0007_sharing_histories.py | 2 ++ lib/galaxy/model/migrate/versions/0008_galaxy_forms.py | 2 ++ lib/galaxy/model/migrate/versions/0009_request_table.py | 2 ++ .../migrate/versions/0010_hda_display_at_authz_table.py | 2 ++ .../model/migrate/versions/0011_v0010_mysql_index_fix.py | 2 ++ lib/galaxy/model/migrate/versions/0012_user_address.py | 2 ++ .../versions/0013_change_lib_item_templates_to_forms.py | 2 ++ lib/galaxy/model/migrate/versions/0014_pages.py | 2 ++ lib/galaxy/model/migrate/versions/0015_tagging.py | 2 ++ .../model/migrate/versions/0016_v0015_mysql_index_fix.py | 2 ++ .../model/migrate/versions/0017_library_item_indexes.py | 2 ++ .../migrate/versions/0018_ordered_tags_and_page_tags.py | 2 ++ .../model/migrate/versions/0019_request_library_folder.py | 2 ++ .../model/migrate/versions/0020_library_upload_job.py | 2 ++ lib/galaxy/model/migrate/versions/0021_user_prefs.py | 2 ++ .../model/migrate/versions/0022_visualization_tables.py | 2 ++ .../versions/0023_page_published_and_deleted_columns.py | 2 ++ .../migrate/versions/0024_page_slug_unique_constraint.py | 2 ++ lib/galaxy/model/migrate/versions/0025_user_info.py | 2 ++ lib/galaxy/model/migrate/versions/0026_cloud_tables.py | 2 ++ lib/galaxy/model/migrate/versions/0027_request_events.py | 2 ++ .../versions/0028_external_metadata_file_override.py | 2 ++ lib/galaxy/model/migrate/versions/0029_user_actions.py | 2 ++ .../model/migrate/versions/0030_history_slug_column.py | 2 ++ .../migrate/versions/0031_community_and_workflow_tags.py | 2 ++ .../migrate/versions/0032_stored_workflow_slug_column.py | 2 ++ .../0033_published_cols_for_histories_and_workflows.py | 2 ++ .../migrate/versions/0034_page_user_share_association.py | 2 ++ 
.../0035_item_annotations_and_workflow_step_tags.py | 2 ++ ...add_deleted_column_to_library_template_assoc_tables.py | 2 ++ lib/galaxy/model/migrate/versions/0037_samples_library.py | 2 ++ ...inheritable_column_to_library_template_assoc_tables.py | 2 ++ .../versions/0039_add_synopsis_column_to_library_table.py | 2 ++ .../model/migrate/versions/0040_page_annotations.py | 2 ++ .../model/migrate/versions/0041_workflow_invocation.py | 2 ++ .../migrate/versions/0042_workflow_invocation_fix.py | 2 ++ .../0043_visualization_sharing_tagging_annotating.py | 2 ++ .../versions/0044_add_notify_column_to_request_table.py | 2 ++ .../versions/0045_request_type_permissions_table.py | 2 ++ .../model/migrate/versions/0046_post_job_actions.py | 2 ++ .../migrate/versions/0047_job_table_user_id_column.py | 2 ++ .../versions/0048_dataset_instance_state_column.py | 2 ++ lib/galaxy/model/migrate/versions/0049_api_keys_table.py | 2 ++ .../model/migrate/versions/0050_drop_cloud_tables.py | 2 ++ .../migrate/versions/0051_imported_col_for_jobs_table.py | 2 ++ .../model/migrate/versions/0052_sample_dataset_table.py | 2 ++ lib/galaxy/model/migrate/versions/0053_item_ratings.py | 2 ++ .../model/migrate/versions/0054_visualization_dbkey.py | 2 ++ .../model/migrate/versions/0055_add_pja_assoc_for_jobs.py | 2 ++ .../model/migrate/versions/0056_workflow_outputs.py | 2 ++ lib/galaxy/model/migrate/versions/0057_request_notify.py | 2 ++ .../model/migrate/versions/0058_history_import_export.py | 2 ++ .../migrate/versions/0059_sample_dataset_file_path.py | 2 ++ .../model/migrate/versions/0060_history_archive_import.py | 2 ++ lib/galaxy/model/migrate/versions/0061_tasks.py | 2 ++ .../model/migrate/versions/0062_user_openid_table.py | 2 ++ lib/galaxy/model/migrate/versions/0063_sequencer_table.py | 2 ++ .../0064_add_run_and_sample_run_association_tables.py | 2 ++ .../versions/0065_add_name_to_form_fields_and_values.py | 2 ++ .../versions/0066_deferred_job_and_transfer_job_tables.py | 2 ++ .../migrate/versions/0067_populate_sequencer_table.py | 2 ++ .../0068_rename_sequencer_to_external_services.py | 2 ++ .../migrate/versions/0069_rename_sequencer_form_type.py | 2 ++ .../0070_add_info_column_to_deferred_job_table.py | 2 ++ .../versions/0071_add_history_and_workflow_to_sample.py | 2 ++ ...72_add_pid_and_socket_columns_to_transfer_job_table.py | 2 ++ .../0073_add_ldda_to_implicit_conversion_table.py | 2 ++ .../0074_add_purged_column_to_library_dataset_table.py | 2 ++ .../versions/0075_add_subindex_column_to_run_table.py | 2 ++ .../versions/0076_fix_form_values_data_corruption.py | 2 ++ .../versions/0077_create_tool_tag_association_table.py | 2 ++ .../0078_add_columns_for_disk_usage_accounting.py | 2 ++ .../migrate/versions/0079_input_library_to_job_table.py | 2 ++ lib/galaxy/model/migrate/versions/0080_quota_tables.py | 2 ++ .../migrate/versions/0081_add_tool_version_to_hda_ldda.py | 2 ++ .../versions/0082_add_tool_shed_repository_table.py | 2 ++ .../migrate/versions/0083_add_prepare_files_to_task.py | 2 ++ .../0084_add_ldda_id_to_implicit_conversion_table.py | 2 ++ lib/galaxy/model/migrate/versions/0085_add_task_info.py | 2 ++ .../0086_add_tool_shed_repository_table_columns.py | 2 ++ .../model/migrate/versions/0087_tool_id_guid_map_table.py | 2 ++ .../0088_add_installed_changeset_revison_column.py | 2 ++ .../migrate/versions/0089_add_object_store_id_columns.py | 2 ++ .../0090_add_tool_shed_repository_table_columns.py | 2 ++ .../migrate/versions/0091_add_tool_version_tables.py | 2 ++ 
.../migrate/versions/0092_add_migrate_tools_table.py | 2 ++ .../model/migrate/versions/0093_add_job_params_col.py | 2 ++ .../model/migrate/versions/0094_add_job_handler_col.py | 2 ++ lib/galaxy/model/migrate/versions/0095_hda_subsets.py | 2 ++ lib/galaxy/model/migrate/versions/0096_openid_provider.py | 2 ++ .../model/migrate/versions/0097_add_ctx_rev_column.py | 2 ++ .../migrate/versions/0098_genome_index_tool_data_table.py | 2 ++ .../migrate/versions/0099_add_tool_dependency_table.py | 2 ++ .../0100_alter_tool_dependency_table_version_column.py | 2 ++ .../0101_drop_installed_changeset_revision_column.py | 2 ++ .../versions/0102_add_tool_dependency_status_columns.py | 2 ++ .../0103_add_tool_shed_repository_status_columns.py | 2 ++ .../migrate/versions/0105_add_cleanup_event_table.py | 2 ++ .../model/migrate/versions/0106_add_missing_indexes.py | 2 ++ .../versions/0107_add_exit_code_to_job_and_task.py | 2 ++ .../model/migrate/versions/0108_add_extended_metadata.py | 2 ++ .../versions/0109_add_repository_dependency_tables.py | 2 ++ .../model/migrate/versions/0110_add_dataset_uuid.py | 2 ++ .../model/migrate/versions/0111_add_job_destinations.py | 2 ++ ...association_and_data_manager_job_association_tables.py | 2 ++ .../migrate/versions/0113_update_migrate_tools_table.py | 2 ++ .../versions/0114_update_migrate_tools_table_again.py | 2 ++ ..._drop_update_available_col_add_tool_shed_status_col.py | 2 ++ .../model/migrate/versions/0117_add_user_activation.py | 2 ++ .../migrate/versions/0118_add_hda_extended_metadata.py | 2 ++ lib/galaxy/model/migrate/versions/0119_job_metrics.py | 2 ++ .../model/migrate/versions/0120_dataset_collections.py | 2 ++ lib/galaxy/model/migrate/versions/0121_workflow_uuids.py | 2 ++ .../model/migrate/versions/0122_grow_mysql_blobs.py | 1 + .../migrate/versions/0123_add_workflow_request_tables.py | 2 ++ .../model/migrate/versions/0124_job_state_history.py | 2 ++ .../model/migrate/versions/0125_workflow_step_tracking.py | 2 ++ lib/galaxy/model/migrate/versions/0126_password_reset.py | 2 ++ .../versions/0127_output_collection_adjustments.py | 2 ++ lib/galaxy/model/migrate/versions/0128_session_timeout.py | 2 ++ .../0129_job_external_output_metadata_validity.py | 2 ++ .../model/migrate/versions/0130_change_pref_datatype.py | 8 ++++---- .../0131_subworkflow_and_input_parameter_modules.py | 2 ++ test/api/test_workflows.py | 2 ++ 138 files changed, 276 insertions(+), 11 deletions(-) diff --git a/lib/galaxy/datatypes/converters/bed_to_gff_converter.py b/lib/galaxy/datatypes/converters/bed_to_gff_converter.py index c67f798a1323..b1d7df54732a 100644 --- a/lib/galaxy/datatypes/converters/bed_to_gff_converter.py +++ b/lib/galaxy/datatypes/converters/bed_to_gff_converter.py @@ -1,5 +1,7 @@ #!/usr/bin/env python # This code exists in 2 places: ~/datatypes/converters and ~/tools/filters +from __future__ import print_function + import sys assert sys.version_info[:2] >= ( 2, 4 ) diff --git a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py index 4f5d9e525f60..5a90201de8f1 100644 --- a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py +++ b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py @@ -1,6 +1,8 @@ #!/usr/bin/env python +from __future__ import print_function import sys + from galaxy.util.checkers import is_gzip diff --git a/lib/galaxy/datatypes/converters/gff_to_bed_converter.py b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py index d2ea88c0ef6b..ee82cde96016 100644 --- 
a/lib/galaxy/datatypes/converters/gff_to_bed_converter.py +++ b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py @@ -1,4 +1,6 @@ #!/usr/bin/env python +from __future__ import print_function + import sys assert sys.version_info[:2] >= ( 2, 4 ) diff --git a/lib/galaxy/datatypes/converters/interval_to_bed_converter.py b/lib/galaxy/datatypes/converters/interval_to_bed_converter.py index 9e9022bd1be0..67b57fcde9ed 100644 --- a/lib/galaxy/datatypes/converters/interval_to_bed_converter.py +++ b/lib/galaxy/datatypes/converters/interval_to_bed_converter.py @@ -1,5 +1,7 @@ #!/usr/bin/env python # Dan Blankenberg +from __future__ import print_function + import sys import bx.intervals.io diff --git a/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py index 64a61825c92a..d4bf8ca3b472 100644 --- a/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py +++ b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py @@ -1,5 +1,7 @@ #!/usr/bin/env python # Dan Blankenberg +from __future__ import print_function + import sys import bx.intervals.io diff --git a/lib/galaxy/datatypes/converters/lped_to_fped_converter.py b/lib/galaxy/datatypes/converters/lped_to_fped_converter.py index de0c14a340bc..17744825f062 100644 --- a/lib/galaxy/datatypes/converters/lped_to_fped_converter.py +++ b/lib/galaxy/datatypes/converters/lped_to_fped_converter.py @@ -2,12 +2,12 @@ # recode to numeric fbat version # much slower so best to always # use numeric alleles internally +from __future__ import print_function -import sys import os +import sys import time - prog = os.path.split(sys.argv[0])[-1] myversion = 'Oct 10 2009' diff --git a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py index 84e1c10d4796..fc7ef10adbc3 100644 --- a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py +++ b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py @@ -4,10 +4,12 @@ # eg lped/eigen/fbat/snpmatrix all to pbed # and pbed to lped/eigen/fbat/snpmatrix ? 
# that's a lot of converters -import sys +from __future__ import print_function + import os -import time import subprocess +import sys +import time prog = os.path.split(sys.argv[0])[-1] myversion = 'Oct 10 2009' diff --git a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py index 1d13db1ddfbe..4a6fa4774a3e 100644 --- a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py +++ b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py @@ -1,5 +1,7 @@ #!/usr/bin/env python # Dan Blankenberg +from __future__ import print_function + import sys import bx.align.maf diff --git a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py index 140d77fe4cee..3e68f2a252cf 100644 --- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py +++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py @@ -1,9 +1,9 @@ #!/usr/bin/env python # Dan Blankenberg - from __future__ import print_function import sys + import bx.align.maf from galaxy.tools.util import maf_utilities diff --git a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py index 9d45df033923..02a6541fe428 100644 --- a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py +++ b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py @@ -1,6 +1,6 @@ # converter for ldreduced rgenetics datatype # used for grr and eigenstrat - shellfish if we get around to it -# +from __future__ import print_function import os import sys diff --git a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py index 7c2806f3d620..ed45a204db1f 100644 --- a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py +++ b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py @@ -4,6 +4,8 @@ # eg lped/eigen/fbat/snpmatrix all to pbed # and pbed to lped/eigen/fbat/snpmatrix ? # that's a lot of converters +from __future__ import print_function + import os import subprocess import sys diff --git a/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py index 0a1d9697c67e..36357cbc042e 100644 --- a/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py +++ b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py @@ -1,5 +1,6 @@ #!/usr/bin/env python # Dan Blankenberg +from __future__ import print_function import sys diff --git a/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py index 92128149f027..bd40e9c14828 100644 --- a/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py +++ b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py @@ -1,11 +1,12 @@ #!/usr/bin/env python # code is same as ~/tools/stats/wiggle_to_simple.py - """ Read a wiggle track and print out a series of lines containing "chrom position score". Ignores track lines, handles bed, variableStep and fixedStep wiggle lines. 
""" +from __future__ import print_function + import sys import bx.wiggle diff --git a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py index 4d0cf1104e16..f81b725bbaba 100644 --- a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py +++ b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py @@ -2,6 +2,8 @@ This migration script changes certain values in the history_dataset_association.extension column, specifically 'qual' is chaged to be 'qual454'. """ +from __future__ import print_function + import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0007_sharing_histories.py b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py index 599717e60a1f..e2b81763b2ed 100644 --- a/lib/galaxy/model/migrate/versions/0007_sharing_histories.py +++ b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py @@ -3,6 +3,8 @@ a new boolean type column to the history table. This provides support for sharing histories in the same way that workflows are shared. """ +from __future__ import print_function + import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py index 9c0912e1fae0..778d923c0192 100644 --- a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py +++ b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py @@ -9,6 +9,8 @@ 7) sample_state 8) sample_event """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0009_request_table.py b/lib/galaxy/model/migrate/versions/0009_request_table.py index da2b21d71c62..536ac69bcd6e 100644 --- a/lib/galaxy/model/migrate/versions/0009_request_table.py +++ b/lib/galaxy/model/migrate/versions/0009_request_table.py @@ -3,6 +3,8 @@ 1) a new boolean type column named 'submitted' to the 'request' table 2) a new string type column named 'bar_code' to the 'sample' table """ +from __future__ import print_function + import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py index ca86235123d0..3ac6258f99c2 100644 --- a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py +++ b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py @@ -8,6 +8,8 @@ (1059, "Identifier name 'ix_history_dataset_association_display_at_authorization_update_time' is too long """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py index 5fcf1e6dff2d..b0473ce4722c 100644 --- a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py +++ b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py @@ -3,6 +3,8 @@ name length limit and thus the index "ix_hdadaa_history_dataset_association_id" has to be manually created. 
""" +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0012_user_address.py b/lib/galaxy/model/migrate/versions/0012_user_address.py index de22c8b162b2..b3df2e4dc9e2 100644 --- a/lib/galaxy/model/migrate/versions/0012_user_address.py +++ b/lib/galaxy/model/migrate/versions/0012_user_address.py @@ -4,6 +4,8 @@ drops the request.submitted column which was boolean and replaces it with a request.state column which is a string, allowing for more flexibility with request states. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py index f781ddde61c6..daa5096a4499 100644 --- a/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py +++ b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py @@ -15,6 +15,8 @@ the library_dataset_dataset_info_association table, which is OK because the script creates an index with a shortened name. """ +from __future__ import print_function + import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0014_pages.py b/lib/galaxy/model/migrate/versions/0014_pages.py index 3bdbfc71fa9c..497886445929 100644 --- a/lib/galaxy/model/migrate/versions/0014_pages.py +++ b/lib/galaxy/model/migrate/versions/0014_pages.py @@ -3,6 +3,8 @@ 1) Creates Page and PageRevision tables 2) Adds username column to User table """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0015_tagging.py b/lib/galaxy/model/migrate/versions/0015_tagging.py index 9fc4c83ed3c5..eacb1dd36589 100644 --- a/lib/galaxy/model/migrate/versions/0015_tagging.py +++ b/lib/galaxy/model/migrate/versions/0015_tagging.py @@ -9,6 +9,8 @@ (1059, "Identifier name 'ix_history_dataset_association_tag_association_history_dataset_association_id' is too long) """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py b/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py index 4890efb57b94..1fbb23438256 100644 --- a/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py +++ b/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py @@ -3,6 +3,8 @@ limit and thus the index "ix_hda_ta_history_dataset_association_id" has to be manually created. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py index 84c01ea658b7..579827c92ae6 100644 --- a/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py +++ b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py @@ -2,6 +2,8 @@ This script adds 3 indexes to table columns: library_folder.name, library_dataset.name, library_dataset_dataset_association.name. 
""" +from __future__ import print_function + import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py index 8027a806169c..4788160261fa 100644 --- a/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py +++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py @@ -2,6 +2,8 @@ This migration script provides support for (a) ordering tags by recency and (b) tagging pages. This script deletes all existing tags. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0019_request_library_folder.py b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py index 651662292114..2e08da609e74 100644 --- a/lib/galaxy/model/migrate/versions/0019_request_library_folder.py +++ b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py @@ -1,3 +1,5 @@ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0020_library_upload_job.py b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py index b6be694e26f5..8e83b40aa047 100644 --- a/lib/galaxy/model/migrate/versions/0020_library_upload_job.py +++ b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py @@ -1,3 +1,5 @@ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0021_user_prefs.py b/lib/galaxy/model/migrate/versions/0021_user_prefs.py index b117daf3cafe..a60849225719 100644 --- a/lib/galaxy/model/migrate/versions/0021_user_prefs.py +++ b/lib/galaxy/model/migrate/versions/0021_user_prefs.py @@ -1,6 +1,8 @@ """ This migration script adds a user preferences table to Galaxy. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0022_visualization_tables.py b/lib/galaxy/model/migrate/versions/0022_visualization_tables.py index 23172a8def9b..38da7fcaf682 100644 --- a/lib/galaxy/model/migrate/versions/0022_visualization_tables.py +++ b/lib/galaxy/model/migrate/versions/0022_visualization_tables.py @@ -2,6 +2,8 @@ Migration script to add support for storing visualizations. 1) Creates Visualization and VisualizationRevision tables """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py b/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py index b2b5c5e83a46..78fe1dc0d465 100644 --- a/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py +++ b/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py @@ -2,6 +2,8 @@ Migration script to add columns for tracking whether pages are deleted and publicly accessible. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py b/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py index 49529ef0f30a..08aade5699e4 100644 --- a/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py +++ b/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py @@ -2,6 +2,8 @@ Remove unique constraint from page slugs to allow creating a page with the same slug as a deleted page. 
""" +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0025_user_info.py b/lib/galaxy/model/migrate/versions/0025_user_info.py index 68ea01c95792..11e00785694d 100644 --- a/lib/galaxy/model/migrate/versions/0025_user_info.py +++ b/lib/galaxy/model/migrate/versions/0025_user_info.py @@ -1,6 +1,8 @@ """ This script adds a foreign key to the form_values table in the galaxy_user table """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0026_cloud_tables.py b/lib/galaxy/model/migrate/versions/0026_cloud_tables.py index 06a44729b6f8..474a59468d03 100644 --- a/lib/galaxy/model/migrate/versions/0026_cloud_tables.py +++ b/lib/galaxy/model/migrate/versions/0026_cloud_tables.py @@ -1,3 +1,5 @@ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0027_request_events.py b/lib/galaxy/model/migrate/versions/0027_request_events.py index c4806ff64c4d..081d0e6b92d5 100644 --- a/lib/galaxy/model/migrate/versions/0027_request_events.py +++ b/lib/galaxy/model/migrate/versions/0027_request_events.py @@ -2,6 +2,8 @@ This migration script adds the request_event table and removes the state field in the request table """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py b/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py index 596c0d99b5b8..73be3f51cffb 100644 --- a/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py +++ b/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py @@ -3,6 +3,8 @@ allowing existing metadata files to be written when using external metadata and a cluster set up with read-only access to database/files """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0029_user_actions.py b/lib/galaxy/model/migrate/versions/0029_user_actions.py index ac546f9e1dec..cd4460c7ea63 100644 --- a/lib/galaxy/model/migrate/versions/0029_user_actions.py +++ b/lib/galaxy/model/migrate/versions/0029_user_actions.py @@ -1,6 +1,8 @@ """ This migration script adds a user actions table to Galaxy. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0030_history_slug_column.py b/lib/galaxy/model/migrate/versions/0030_history_slug_column.py index 4a57937ef5d9..11f05f40b99c 100644 --- a/lib/galaxy/model/migrate/versions/0030_history_slug_column.py +++ b/lib/galaxy/model/migrate/versions/0030_history_slug_column.py @@ -1,6 +1,8 @@ """ Migration script to add column for a history slug. """ +from __future__ import print_function + import logging from sqlalchemy import Column, Index, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py b/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py index a7a4b737c54f..2ebe57c2bcd6 100644 --- a/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py +++ b/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py @@ -5,6 +5,8 @@ SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so this script will generate error messages when run against \ SQLite; however, script does execute successfully against SQLite. 
""" +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, Unicode diff --git a/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py b/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py index 6adb4a4e9af6..7f30dde6072c 100644 --- a/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py +++ b/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py @@ -1,6 +1,8 @@ """ Migration script to add slug column for stored workflow. """ +from __future__ import print_function + import logging from sqlalchemy import Column, Index, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py b/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py index 32509128bab4..616eaec95b2e 100644 --- a/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py +++ b/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py @@ -2,6 +2,8 @@ Migration script to add necessary columns for distinguishing between viewing/importing and publishing histories, \ workflows, and pages. Script adds published column to histories and workflows and importable column to pages. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, Index, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py b/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py index fd2489017108..b40e04e522b3 100644 --- a/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py +++ b/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py @@ -1,6 +1,8 @@ """ Migration script to create a table for page-user share association. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py b/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py index 3be86d4ef416..55600d8a630b 100644 --- a/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py +++ b/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py @@ -1,6 +1,8 @@ """ Migration script to (a) create tables for annotating objects and (b) create tags for workflow steps. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table, TEXT, Unicode diff --git a/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py b/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py index a13ed69a0bfc..bf0918910652 100644 --- a/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py +++ b/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py @@ -2,6 +2,8 @@ Migration script to add a deleted column to the following tables: library_info_association, library_folder_info_association, library_dataset_dataset_info_association. 
""" +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0037_samples_library.py b/lib/galaxy/model/migrate/versions/0037_samples_library.py index 008a89c1747c..3f7e541bbdf8 100644 --- a/lib/galaxy/model/migrate/versions/0037_samples_library.py +++ b/lib/galaxy/model/migrate/versions/0037_samples_library.py @@ -4,6 +4,8 @@ to store the sequencer login information. Finally, this adds a 'dataset_files' column to the sample table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py b/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py index f2013829988e..e87a1b745a5a 100644 --- a/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py +++ b/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py @@ -4,6 +4,8 @@ Also, in case of sqlite check if the previous migration script deleted the request table and if so, restore the table. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py b/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py index 45afb6d0d055..72a73030f356 100644 --- a/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py +++ b/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py @@ -1,6 +1,8 @@ """ Migration script to add a synopsis column to the library table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0040_page_annotations.py b/lib/galaxy/model/migrate/versions/0040_page_annotations.py index eb9c3ee0106c..9fe0e0576cdf 100644 --- a/lib/galaxy/model/migrate/versions/0040_page_annotations.py +++ b/lib/galaxy/model/migrate/versions/0040_page_annotations.py @@ -1,6 +1,8 @@ """ Migration script to (a) create tables for annotating pages. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py b/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py index 27a5112d58ac..a1a41796962f 100644 --- a/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py +++ b/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py @@ -1,6 +1,8 @@ """ Migration script to create tables for tracking workflow invocations. 
""" +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py b/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py index d95969445f8f..a5f0cf691ab7 100644 --- a/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py +++ b/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py @@ -1,6 +1,8 @@ """ Drop and readd workflow invocation tables, allowing null jobs """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py index 1f8878d3c376..642d2e995cc4 100644 --- a/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py +++ b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py @@ -1,6 +1,8 @@ """ Migration script to create tables and columns for sharing visualizations. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, ForeignKey, Index, Integer, MetaData, Table, TEXT, Unicode diff --git a/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py b/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py index 61a5cd2d2a7d..825a824aa806 100644 --- a/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py +++ b/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py @@ -1,6 +1,8 @@ """ Migration script to add a notify column to the request table. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py index 300eb8660ae5..72b0cd735a2c 100644 --- a/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py +++ b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py @@ -1,6 +1,8 @@ """ Migration script to add the request_type_permissions table. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0046_post_job_actions.py b/lib/galaxy/model/migrate/versions/0046_post_job_actions.py index 7a04f453a7ce..e282800addde 100644 --- a/lib/galaxy/model/migrate/versions/0046_post_job_actions.py +++ b/lib/galaxy/model/migrate/versions/0046_post_job_actions.py @@ -1,6 +1,8 @@ """ Migration script to create tables for handling post-job actions. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py b/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py index d50517e0cf22..846ae8cd600a 100644 --- a/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py +++ b/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py @@ -1,6 +1,8 @@ """ Add a user_id column to the job table. 
""" +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py b/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py index a1a66d68bb81..e4003f9f307c 100644 --- a/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py +++ b/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py @@ -1,6 +1,8 @@ """ Add a state column to the history_dataset_association and library_dataset_dataset_association table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0049_api_keys_table.py b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py index 0696a0d04efb..6bfa9fa3fb30 100644 --- a/lib/galaxy/model/migrate/versions/0049_api_keys_table.py +++ b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py @@ -1,6 +1,8 @@ """ Migration script to add the api_keys table. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py b/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py index 1e794fd5afab..5508bc0b48eb 100644 --- a/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py +++ b/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py @@ -1,3 +1,5 @@ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py b/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py index ecb5ced6f6dd..2c88a70718aa 100644 --- a/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py +++ b/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py @@ -1,6 +1,8 @@ """ Migration script to add imported column for jobs table. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py index 7781a19b689c..26e7ca09c239 100644 --- a/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py +++ b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py @@ -2,6 +2,8 @@ Migration script to add the sample_dataset table and remove the 'dataset_files' column from the 'sample' table """ +from __future__ import print_function + import datetime import logging from json import loads diff --git a/lib/galaxy/model/migrate/versions/0053_item_ratings.py b/lib/galaxy/model/migrate/versions/0053_item_ratings.py index 688ebc83d05f..83db5b793f76 100644 --- a/lib/galaxy/model/migrate/versions/0053_item_ratings.py +++ b/lib/galaxy/model/migrate/versions/0053_item_ratings.py @@ -1,6 +1,8 @@ """ Migration script to create tables for rating histories, datasets, workflows, pages, and visualizations. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py index d9f6be19067f..3cfe92d9cb5e 100644 --- a/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py +++ b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py @@ -1,6 +1,8 @@ """ Migration script to add dbkey column for visualization. 
""" +from __future__ import print_function + import logging from json import loads diff --git a/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py b/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py index da4c58f9d59b..1a7eca54171f 100644 --- a/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py +++ b/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py @@ -1,6 +1,8 @@ """ Migration script to add the post_job_action_association table. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py b/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py index 4f2cf97425a1..d0a0f7e886f4 100644 --- a/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py +++ b/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py @@ -1,6 +1,8 @@ """ Migration script to create tables for adding explicit workflow outputs. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table diff --git a/lib/galaxy/model/migrate/versions/0057_request_notify.py b/lib/galaxy/model/migrate/versions/0057_request_notify.py index 07c382a70e14..2c2c100d6b2b 100644 --- a/lib/galaxy/model/migrate/versions/0057_request_notify.py +++ b/lib/galaxy/model/migrate/versions/0057_request_notify.py @@ -2,6 +2,8 @@ Migration script to modify the 'notify' field in the 'request' table from a boolean to a JSONType """ +from __future__ import print_function + import datetime import logging from json import dumps diff --git a/lib/galaxy/model/migrate/versions/0058_history_import_export.py b/lib/galaxy/model/migrate/versions/0058_history_import_export.py index f6c4953e5f6d..7b14d66638fb 100644 --- a/lib/galaxy/model/migrate/versions/0058_history_import_export.py +++ b/lib/galaxy/model/migrate/versions/0058_history_import_export.py @@ -1,6 +1,8 @@ """ Migration script to create table for exporting histories to archives. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, ForeignKey, Integer, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py index 47c2cad2687c..3768a4145064 100644 --- a/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py +++ b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py @@ -2,6 +2,8 @@ Migration script to modify the 'file_path' field type in 'sample_dataset' table to 'TEXT' so that it can support large file paths exceeding 255 characters """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0060_history_archive_import.py b/lib/galaxy/model/migrate/versions/0060_history_archive_import.py index 64170b5602ef..ccdaa73d2e92 100644 --- a/lib/galaxy/model/migrate/versions/0060_history_archive_import.py +++ b/lib/galaxy/model/migrate/versions/0060_history_archive_import.py @@ -2,6 +2,8 @@ Migration script to create column and table for importing histories from file archives. 
""" +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, ForeignKey, Integer, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0061_tasks.py b/lib/galaxy/model/migrate/versions/0061_tasks.py index cec0c5d1e8b0..b83d64baf647 100644 --- a/lib/galaxy/model/migrate/versions/0061_tasks.py +++ b/lib/galaxy/model/migrate/versions/0061_tasks.py @@ -1,6 +1,8 @@ """ Migration script to create tables task management. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0062_user_openid_table.py b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py index 59f80f975ad4..519ce8180615 100644 --- a/lib/galaxy/model/migrate/versions/0062_user_openid_table.py +++ b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py @@ -2,6 +2,8 @@ Migration script to create table for associating sessions and users with OpenIDs. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0063_sequencer_table.py b/lib/galaxy/model/migrate/versions/0063_sequencer_table.py index 77a57db0a61c..c90f262e02cc 100644 --- a/lib/galaxy/model/migrate/versions/0063_sequencer_table.py +++ b/lib/galaxy/model/migrate/versions/0063_sequencer_table.py @@ -1,6 +1,8 @@ """ Migration script to create a new 'sequencer' table """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py b/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py index d46da866a6ec..b9f3849de91b 100644 --- a/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py +++ b/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py @@ -1,6 +1,8 @@ """ Migration script to add the run and sample_run_association tables. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py index dc10e46ab97f..d64c5bf7f045 100644 --- a/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py +++ b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py @@ -3,6 +3,8 @@ a form definition field and the form values in the database. In the 'form_values' table, the 'content' column is now a JSON dict instead of a list. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py b/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py index 09d787964b38..c7d2f5db7a62 100644 --- a/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py +++ b/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py @@ -2,6 +2,8 @@ Migration script to create table for storing deferred job and managed transfer information. 
""" +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py index 908edb319d61..d1fc39db3126 100644 --- a/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py +++ b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py @@ -4,6 +4,8 @@ column in the 'request_type' table and adds a foreign key to the 'sequencer' table. The actual contents of the datatx_info column are stored as form_values. """ +from __future__ import print_function + import datetime import logging from json import dumps, loads diff --git a/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py index 7c98bf76353b..4f2e8beb43db 100644 --- a/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py +++ b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py @@ -6,6 +6,8 @@ table 'external_service'. Finally, adds a foreign key to the external_service table in the sample_dataset table and populates it. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py b/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py index 693e00118c43..8167456490fc 100644 --- a/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py +++ b/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py @@ -1,6 +1,8 @@ """ Migration script to rename the sequencer information form type to external service information form """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py b/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py index 74f96c36d0da..f51401d44d33 100644 --- a/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py +++ b/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py @@ -1,6 +1,8 @@ """ Migration script to add 'info' column to the transfer_job table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py b/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py index b10ed120f1f1..2d5809185f4e 100644 --- a/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py +++ b/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py @@ -1,6 +1,8 @@ """ Migration script to add 'workflow' and 'history' columns for a sample. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py b/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py index 1399ab5606ae..a183b52a6c52 100644 --- a/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py +++ b/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py @@ -1,6 +1,8 @@ """ Migration script to add 'pid' and 'socket' columns to the transfer_job table. 
""" +from __future__ import print_function + import logging from sqlalchemy import Column, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py b/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py index a786d281f01b..8002f8e3def7 100644 --- a/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py +++ b/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py @@ -1,6 +1,8 @@ """ Migration script to add 'ldda_parent_id' column to the implicitly_converted_dataset_association table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py b/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py index 91f8255e88f3..688b00d5ba54 100644 --- a/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py +++ b/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py @@ -1,6 +1,8 @@ """ Migration script to add 'purged' column to the library_dataset table. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py b/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py index 51bef48e44c8..431a170a63f7 100644 --- a/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py +++ b/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py @@ -1,6 +1,8 @@ """ Migration script to add a 'subindex' column to the run table. """ +from __future__ import print_function + from sqlalchemy import Column, MetaData, Table from galaxy.model.custom_types import TrimmedString diff --git a/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py index defd24d625b5..d17f38192fae 100644 --- a/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py +++ b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py @@ -2,6 +2,8 @@ This migration script fixes the data corruption caused in the form_values table (content json field) by migrate script 65. ''' +from __future__ import print_function + import logging from json import dumps, loads diff --git a/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py index 2a53e6135018..cedde80c99cf 100644 --- a/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py +++ b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py @@ -1,6 +1,8 @@ """ Migration script to create table for storing tool tag associations. 
""" +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py b/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py index 4c628efd261e..653cd527b60f 100644 --- a/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py +++ b/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py @@ -3,6 +3,8 @@ column to the HDA table, and 'disk_usage' column to the User and GalaxySession tables. """ +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Numeric, Table diff --git a/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py b/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py index 6f84cde2b3a5..3360b8292499 100644 --- a/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py +++ b/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py @@ -1,6 +1,8 @@ """ Migration script to add the job_to_input_library_dataset table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table diff --git a/lib/galaxy/model/migrate/versions/0080_quota_tables.py b/lib/galaxy/model/migrate/versions/0080_quota_tables.py index 77ca768b10a0..411d827f3535 100644 --- a/lib/galaxy/model/migrate/versions/0080_quota_tables.py +++ b/lib/galaxy/model/migrate/versions/0080_quota_tables.py @@ -1,6 +1,8 @@ """ Migration script to create tables for disk quotas. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py b/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py index 34b3949198b4..281c134fff14 100644 --- a/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py +++ b/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py @@ -1,6 +1,8 @@ """ Migration script to add a 'tool_version' column to the hda/ldda tables. """ +from __future__ import print_function + from sqlalchemy import Column, MetaData, Table, TEXT metadata = MetaData() diff --git a/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py b/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py index b7cfe370276b..f8ea186fc490 100644 --- a/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py +++ b/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py @@ -1,6 +1,8 @@ """ Migration script to add the tool_shed_repository table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py b/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py index e65d06b8a349..220046eac99f 100644 --- a/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py +++ b/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py @@ -1,6 +1,8 @@ """ Migration script to add 'prepare_input_files_cmd' column to the task table and to rename a column. 
""" +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, String, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py b/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py index 5dde0a3a1dc1..549c7f5e79bb 100644 --- a/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py +++ b/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py @@ -1,6 +1,8 @@ """ Migration script to add 'ldda_id' column to the implicitly_converted_dataset_association table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0085_add_task_info.py b/lib/galaxy/model/migrate/versions/0085_add_task_info.py index 327bb8303326..35dacee9a9a5 100644 --- a/lib/galaxy/model/migrate/versions/0085_add_task_info.py +++ b/lib/galaxy/model/migrate/versions/0085_add_task_info.py @@ -1,6 +1,8 @@ """ Migration script to add 'info' column to the task table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py index 81dacdb3dd9d..cc39f7b63ca1 100644 --- a/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py +++ b/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py @@ -1,6 +1,8 @@ """ Migration script to add the metadata, update_available and includes_datatypes columns to the tool_shed_repository table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py index b51035bf42e7..6892030af23f 100644 --- a/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py +++ b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py @@ -1,6 +1,8 @@ """ Migration script to create the tool_id_guid_map table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py index 38ad15192872..7c68112c31e4 100644 --- a/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py +++ b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py @@ -1,6 +1,8 @@ """ Migration script to add the installed_changeset_revision column to the tool_shed_repository table. 
""" +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py b/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py index acf4469413a4..51917b5b8e23 100644 --- a/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py +++ b/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py @@ -1,6 +1,8 @@ """ Migration script to add 'object_store_id' column to various tables """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py index 8b35c9aab191..56a8c2102511 100644 --- a/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py +++ b/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py @@ -1,6 +1,8 @@ """ Migration script to add the uninstalled and dist_to_shed columns to the tool_shed_repository table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py index f9ae343ac365..8737d6ac6152 100644 --- a/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py +++ b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py @@ -1,6 +1,8 @@ """ Migration script to create the tool_version and tool_version_association tables and drop the tool_id_guid_map table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py index e9d55d264bc3..2f8df45f1075 100644 --- a/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py +++ b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py @@ -1,6 +1,8 @@ """ Migration script to create the migrate_tools table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py index 12d1fa876ef2..94aae095cb03 100644 --- a/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py +++ b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py @@ -1,6 +1,8 @@ """ Migration script to create "params" column in job table. """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py index 46ddd8e5b8ec..a534875b6e72 100644 --- a/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py +++ b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py @@ -1,6 +1,8 @@ """ Migration script to create "handler" column in job table. 
""" +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0095_hda_subsets.py b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py index d1c15c9052c6..fc26291411e5 100644 --- a/lib/galaxy/model/migrate/versions/0095_hda_subsets.py +++ b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py @@ -1,6 +1,8 @@ """ Migration script to create table for tracking history_dataset_association subsets. """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table, Unicode diff --git a/lib/galaxy/model/migrate/versions/0096_openid_provider.py b/lib/galaxy/model/migrate/versions/0096_openid_provider.py index 99481801b744..3610515e9cc3 100644 --- a/lib/galaxy/model/migrate/versions/0096_openid_provider.py +++ b/lib/galaxy/model/migrate/versions/0096_openid_provider.py @@ -2,6 +2,8 @@ Migration script to add column to openid table for provider. Remove any OpenID entries with nonunique GenomeSpace Identifier """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py index 5f294e37ed2b..1e2016014f25 100644 --- a/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py +++ b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py @@ -1,6 +1,8 @@ """ Migration script to add the ctx_rev column to the tool_shed_repository table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py b/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py index eff33d3f9fce..6b8283b49b0e 100644 --- a/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py +++ b/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py @@ -1,6 +1,8 @@ """ Migration script to create the genome_index_tool_data table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py index 3994679e7d30..b3e24b86645f 100644 --- a/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py +++ b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py @@ -1,6 +1,8 @@ """ Migration script to add the tool_dependency table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py index edeec5578938..ed8deec941b1 100644 --- a/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py +++ b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py @@ -1,6 +1,8 @@ """ Migration script to alter the type of the tool_dependency.version column from TrimmedString(40) to Text. 
""" +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py index 15f646d8c310..fcdf80263134 100644 --- a/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py +++ b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py @@ -1,6 +1,8 @@ """ Migration script to drop the installed_changeset_revision column from the tool_dependency table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py index 889d86351576..773b4bdbb6bb 100644 --- a/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py +++ b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py @@ -1,6 +1,8 @@ """ Migration script to add status and error_message columns to the tool_dependency table and drop the uninstalled column from the tool_dependency table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py b/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py index 78ccd57b9873..e19d0c76a0e1 100644 --- a/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py +++ b/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py @@ -1,4 +1,6 @@ """Migration script to add status and error_message columns to the tool_shed_repository table.""" +from __future__ import print_function + import datetime from sqlalchemy import Column, MetaData, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py b/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py index 2782ea9748d2..58a538f86029 100644 --- a/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py +++ b/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py @@ -1,6 +1,8 @@ """ Migration script to add the cleanup_event* tables. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py b/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py index bd4ad57275d5..843aea40efba 100644 --- a/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py +++ b/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py @@ -1,6 +1,8 @@ """ Migration script to create missing indexes. Adding new columns to existing tables via SQLAlchemy does not create the index, even if the column definition includes index=True. """ +from __future__ import print_function + import logging from sqlalchemy import Index, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py b/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py index 84f81792c216..ea1cfcc103b8 100644 --- a/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py +++ b/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py @@ -1,6 +1,8 @@ """ Add the exit_code column to the Job and Task tables. 
""" +from __future__ import print_function + import logging from sqlalchemy import Column, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py index c083c8d08db3..35fe96f49acd 100644 --- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py +++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py @@ -1,6 +1,8 @@ """ Add the ExtendedMetadata and ExtendedMetadataIndex tables """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, TEXT diff --git a/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py index ded3ed515960..ac94a18e850a 100644 --- a/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py +++ b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py @@ -1,6 +1,8 @@ """ Migration script to add the repository_dependency and repository_repository_dependency_association tables. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py b/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py index cdf1d4e70c40..7f6437c4896b 100644 --- a/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py +++ b/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py @@ -1,6 +1,8 @@ """ Add UUID column to dataset table """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py b/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py index bfb1dc4e6d14..d0efe30957cd 100644 --- a/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py +++ b/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py @@ -1,6 +1,8 @@ """ Add support for job destinations to the job table """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, String, Table diff --git a/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py b/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py index d62f2722ee2e..a95bb551ef8c 100644 --- a/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py +++ b/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py @@ -1,6 +1,8 @@ """ Migration script to add the data_manager_history_association table and data_manager_job_association. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py index 07d54bb9ae4e..2b42b0324cd5 100644 --- a/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py +++ b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py @@ -1,6 +1,8 @@ """ Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate. 
""" +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py b/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py index 4408533a57bb..79dd48aa1845 100644 --- a/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py +++ b/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py @@ -1,6 +1,8 @@ """ Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py b/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py index a5040e9b5322..699e81aa39f1 100644 --- a/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py +++ b/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py @@ -1,6 +1,8 @@ """ Migration script to drop the update_available Boolean column and replace it with the tool_shed_status JSONType column in the tool_shed_repository table. """ +from __future__ import print_function + import datetime import logging import sys diff --git a/lib/galaxy/model/migrate/versions/0117_add_user_activation.py b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py index b01dc907464c..75f17fb50c96 100644 --- a/lib/galaxy/model/migrate/versions/0117_add_user_activation.py +++ b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py @@ -5,6 +5,8 @@ Adds 'active' and 'activation_token' columns to the galaxy_user table. ''' +from __future__ import print_function + import logging from sqlalchemy import Boolean, Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py b/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py index a9a9f4c0e712..df10d0d8b83a 100644 --- a/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py +++ b/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py @@ -1,6 +1,8 @@ """ Add link from history_dataset_association to the extended_metadata table """ +from __future__ import print_function + import logging from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0119_job_metrics.py b/lib/galaxy/model/migrate/versions/0119_job_metrics.py index 4a8136243dfd..800ead137cee 100644 --- a/lib/galaxy/model/migrate/versions/0119_job_metrics.py +++ b/lib/galaxy/model/migrate/versions/0119_job_metrics.py @@ -1,6 +1,8 @@ """ Migration script for job metric plugins. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0120_dataset_collections.py b/lib/galaxy/model/migrate/versions/0120_dataset_collections.py index eec3d3bfb3ab..5b3fcdb15c15 100644 --- a/lib/galaxy/model/migrate/versions/0120_dataset_collections.py +++ b/lib/galaxy/model/migrate/versions/0120_dataset_collections.py @@ -1,6 +1,8 @@ """ Migration script for tables related to dataset collections. 
""" +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py b/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py index 247ca660a498..2614128a9021 100644 --- a/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py +++ b/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py @@ -1,6 +1,8 @@ """ Add UUIDs to workflows """ +from __future__ import print_function + import logging from sqlalchemy import Column, MetaData, Table diff --git a/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py b/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py index 3005fb11e9dd..e2b5dcfa0f40 100644 --- a/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py +++ b/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py @@ -1,6 +1,7 @@ """ Migration script to grow MySQL blobs. """ +from __future__ import print_function from sqlalchemy import MetaData diff --git a/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py b/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py index 8693e994c103..11a8d38be6e6 100644 --- a/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py +++ b/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py @@ -1,6 +1,8 @@ """ Migration script for workflow request tables. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0124_job_state_history.py b/lib/galaxy/model/migrate/versions/0124_job_state_history.py index d4a25816874c..2c93ca3283e9 100644 --- a/lib/galaxy/model/migrate/versions/0124_job_state_history.py +++ b/lib/galaxy/model/migrate/versions/0124_job_state_history.py @@ -1,6 +1,8 @@ """ Migration script for the job state history table """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py b/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py index a9086804c7e7..d53ec8e01713 100644 --- a/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py +++ b/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py @@ -1,6 +1,8 @@ """ Migration script to enhance workflow step usability by adding labels and UUIDs. """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0126_password_reset.py b/lib/galaxy/model/migrate/versions/0126_password_reset.py index 66dbdf5b6fc6..d7387e6bd0ab 100644 --- a/lib/galaxy/model/migrate/versions/0126_password_reset.py +++ b/lib/galaxy/model/migrate/versions/0126_password_reset.py @@ -1,6 +1,8 @@ """ Migration script for the password reset table """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py b/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py index c1cca338f24c..ca4d4c4907ee 100644 --- a/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py +++ b/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py @@ -1,6 +1,8 @@ """ Migration script updating collections tables for output collections. 
""" +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0128_session_timeout.py b/lib/galaxy/model/migrate/versions/0128_session_timeout.py index 7914047ac917..948ec9cb8cee 100644 --- a/lib/galaxy/model/migrate/versions/0128_session_timeout.py +++ b/lib/galaxy/model/migrate/versions/0128_session_timeout.py @@ -1,6 +1,8 @@ """ Migration script to add session update time (used for timeouts) """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py b/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py index fe5dca741f2e..c7ba31a3d9e7 100644 --- a/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py +++ b/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py @@ -1,6 +1,8 @@ """ Migration script to allow invalidation of job external output metadata temp files """ +from __future__ import print_function + import datetime import logging diff --git a/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py b/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py index 220268fa5cc2..55994f6ad7e1 100644 --- a/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py +++ b/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py @@ -1,15 +1,15 @@ """ Migration script to change the value column of user_preference from varchar to text. """ +from __future__ import print_function + +import datetime +import logging from sqlalchemy import MetaData, Table, Text -import datetime now = datetime.datetime.utcnow - -import logging log = logging.getLogger( __name__ ) - metadata = MetaData() diff --git a/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py b/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py index ad1e1c485e42..bf791ec86475 100644 --- a/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py +++ b/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py @@ -1,6 +1,8 @@ """ Migration script to support subworkflows and workflow request input parameters """ +from __future__ import print_function + import datetime import logging diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py index a6a9fcde2aa5..03f939cc9d09 100644 --- a/test/api/test_workflows.py +++ b/test/api/test_workflows.py @@ -1,3 +1,5 @@ +from __future__ import print_function + import time import yaml from json import dumps From 5eb60b3448f2cdf8706aa2ab43e4d51dcfc0a0cb Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Tue, 28 Jun 2016 09:59:13 +0100 Subject: [PATCH 58/86] Adds missing whitespace to line in Kubernetes runner. 
--- lib/galaxy/jobs/runners/kubernetes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py index deb365c9efcd..146ebe243009 100644 --- a/lib/galaxy/jobs/runners/kubernetes.py +++ b/lib/galaxy/jobs/runners/kubernetes.py @@ -311,7 +311,7 @@ def __produce_log_file(self, job_state): logs += "\n\n==== Pod " + pod.name + " log end ====" except Exception as detail: log.info("Could not write pod\'s " + pod_obj['metadata']['name'] + - " log file due to HTTPError "+str(detail)) + " log file due to HTTPError " + str(detail)) logs_file_path = job_state.output_file logs_file = open(logs_file_path, mode="w") From c3bf89f75093bfe73f09d0f33dff8df6d60f9269 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Tue, 28 Jun 2016 12:10:04 -0400 Subject: [PATCH 59/86] Fix tag_autocomplete issue. When we swapped to tag manager the signature changed and some of these were missed. --- lib/galaxy/webapps/galaxy/controllers/tag.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/tag.py b/lib/galaxy/webapps/galaxy/controllers/tag.py index 3e6dd444c903..509c6e862019 100644 --- a/lib/galaxy/webapps/galaxy/controllers/tag.py +++ b/lib/galaxy/webapps/galaxy/controllers/tag.py @@ -128,9 +128,9 @@ def _get_tag_autocomplete_names( self, trans, q, limit, timestamp, user=None, it # Create and return autocomplete data. ac_data = "#Header|Your Tags\n" for row in result_set: - tag = self.get_tag_handler( trans ).get_tag_by_id( trans, row[0] ) + tag = self.get_tag_handler( trans ).get_tag_by_id( row[0] ) # Exclude tags that are already applied to the item. - if ( item is not None ) and ( self.get_tag_handler( trans ).item_has_tag( trans, trans.user, item, tag ) ): + if ( item is not None ) and ( self.get_tag_handler( trans ).item_has_tag( trans.user, item, tag ) ): continue # Add tag to autocomplete data. Use the most frequent name that user # has employed for the tag. @@ -146,7 +146,7 @@ def _get_tag_autocomplete_values( self, trans, q, limit, timestamp, user=None, i tag_name_and_value = q.split( ":" ) tag_name = tag_name_and_value[0] tag_value = tag_name_and_value[1] - tag = self.get_tag_handler( trans ).get_tag_by_name( trans, tag_name ) + tag = self.get_tag_handler( trans ).get_tag_by_name( tag_name ) # Don't autocomplete if tag doesn't exist. 
if tag is None: return "" From d2d2b437372f7d96329c24f74ac2b2c4ccdef96d Mon Sep 17 00:00:00 2001 From: Anthony Bretaudeau Date: Tue, 28 Jun 2016 19:58:18 +0200 Subject: [PATCH 60/86] fix UnboundLocalError --- lib/galaxy/tools/parameters/basic.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index a918aecd1262..5004e0f2d0d9 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -2152,6 +2152,7 @@ def _get_single_collection_field( self, trans, history, value, other_values ): return field def from_json( self, value, trans, other_values={} ): + rval = None if trans.workflow_building_mode is workflow_building_modes.ENABLED: return None if not value and not self.optional: From 1d228d9a5c79f90e4b015c768f3841b132a4482b Mon Sep 17 00:00:00 2001 From: Anthony Bretaudeau Date: Tue, 28 Jun 2016 20:00:25 +0200 Subject: [PATCH 61/86] add test case for galaxyproject/tools-iuc/issues/857 --- test/api/test_tools.py | 35 +++++++++++++++++++ .../identifier_multiple_in_conditional.xml | 28 +++++++++++++++ test/functional/tools/samples_tool_conf.xml | 1 + 3 files changed, 64 insertions(+) create mode 100644 test/functional/tools/identifier_multiple_in_conditional.xml diff --git a/test/api/test_tools.py b/test/api/test_tools.py index 5f37d07db058..535dc56f182b 100644 --- a/test/api/test_tools.py +++ b/test/api/test_tools.py @@ -1,5 +1,6 @@ # Test tools API. from base import api +import json from operator import itemgetter from .helpers import DatasetPopulator from .helpers import DatasetCollectionPopulator @@ -805,6 +806,40 @@ def test_identifier_with_multiple_normal_datasets( self ): output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 ) self.assertEquals( output1_content.strip(), "Pasted Entry\nPasted Entry" ) + @skip_without_tool( "identifier_multiple_in_conditional" ) + def test_identifier_with_multiple_normal_datasets_in_conditional( self ): + history_id = self.dataset_populator.new_history() + + element_identifiers = self.dataset_collection_populator.list_identifiers( history_id ) + + payload = dict( + instance_type="history", + history_id=history_id, + element_identifiers=json.dumps(element_identifiers), + collection_type="list", + ) + + create_response = self._post( "dataset_collections", payload ) + dataset_collection = create_response.json() + + inputs = { + "options|choice": "one", + "options|input1": {'src': 'hdca', 'id': dataset_collection['id']}, + } + + self.dataset_populator.wait_for_history( history_id, assert_ok=True ) + create_response = self._run( "identifier_multiple_in_conditional", history_id, inputs ) + self._assert_status_code_is( create_response, 200 ) + create = create_response.json() + outputs = create[ 'outputs' ] + jobs = create[ 'jobs' ] + implicit_collections = create[ 'implicit_collections' ] + self.assertEquals( len( jobs ), 1 ) + self.assertEquals( len( outputs ), 1 ) + output1 = outputs[ 0 ] + output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 ) + self.assertEquals( output1_content.strip(), '\n'.join([d['name'] for d in element_identifiers]) ) + @skip_without_tool( "cat1" ) def test_map_over_nested_collections( self ): history_id = self.dataset_populator.new_history() diff --git a/test/functional/tools/identifier_multiple_in_conditional.xml b/test/functional/tools/identifier_multiple_in_conditional.xml new file mode 100644 index 000000000000..ff02e2840557 --- 
/dev/null +++ b/test/functional/tools/identifier_multiple_in_conditional.xml @@ -0,0 +1,28 @@ + + + #if $options.choice == "one": + #for $input in $options.input1: + echo '$input.element_identifier' >> 'output1'; + #end for + #end if + + + + + + + + + + + + + + + + + + + + + diff --git a/test/functional/tools/samples_tool_conf.xml b/test/functional/tools/samples_tool_conf.xml index 5ec9446d0f2c..16ed1fecf649 100644 --- a/test/functional/tools/samples_tool_conf.xml +++ b/test/functional/tools/samples_tool_conf.xml @@ -64,6 +64,7 @@ + From b5468340eed858efb1d4bc81c721e9362a0ff511 Mon Sep 17 00:00:00 2001 From: Eric Rasche Date: Tue, 28 Jun 2016 20:51:10 +0000 Subject: [PATCH 62/86] Fixes #2442 --- .../plugins/interactive_environments/jupyter/config/jupyter.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/plugins/interactive_environments/jupyter/config/jupyter.xml b/config/plugins/interactive_environments/jupyter/config/jupyter.xml index e1c8bd0040c9..437ed4da2c13 100644 --- a/config/plugins/interactive_environments/jupyter/config/jupyter.xml +++ b/config/plugins/interactive_environments/jupyter/config/jupyter.xml @@ -6,6 +6,7 @@ HistoryDatasetAssociation tabular.Tabular data.Text + binary.Bam dataset_id From f8fb69c81a7fd3341ecdff72f498526775bf9604 Mon Sep 17 00:00:00 2001 From: Eric Rasche Date: Tue, 28 Jun 2016 21:02:00 +0000 Subject: [PATCH 63/86] All the binary datatypes --- .../plugins/interactive_environments/jupyter/config/jupyter.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/plugins/interactive_environments/jupyter/config/jupyter.xml b/config/plugins/interactive_environments/jupyter/config/jupyter.xml index 437ed4da2c13..d4f42d5b3942 100644 --- a/config/plugins/interactive_environments/jupyter/config/jupyter.xml +++ b/config/plugins/interactive_environments/jupyter/config/jupyter.xml @@ -6,7 +6,7 @@ HistoryDatasetAssociation tabular.Tabular data.Text - binary.Bam + binary.Binary dataset_id From 9a97a2952f3c33a496157e86719fc41e10d32382 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 29 Jun 2016 01:55:44 -0400 Subject: [PATCH 64/86] Python3: test/unit/ --- .ci/py3_sources.txt | 21 +------- test/unit/managers/test_HDAManager.py | 30 +++++------ test/unit/managers/test_HDCAManager.py | 8 +-- .../managers/test_HistoryContentsManager.py | 53 +++++++++---------- test/unit/managers/test_HistoryManager.py | 24 ++++----- test/unit/managers/test_UserManager.py | 17 +++--- test/unit/tools/test_toolbox.py | 16 +++--- .../plugins/test_VisualizationPlugin.py | 6 ++- .../plugins/test_VisualizationsRegistry.py | 18 ++++--- test/unit/web/base/test_HookPluginManager.py | 20 +++---- .../web/base/test_PageServingPluginManager.py | 8 +-- test/unit/web/base/test_PluginManager.py | 12 ++--- 12 files changed, 104 insertions(+), 129 deletions(-) diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt index 398307358788..035b842699f8 100644 --- a/.ci/py3_sources.txt +++ b/.ci/py3_sources.txt @@ -69,26 +69,7 @@ scripts/cleanup_datasets/update_metadata.py scripts/data_libraries/build_whoosh_index.py scripts/db_shell.py scripts/drmaa_external_runner.py -test/api/helpers.py -test/api/test_datasets.py -test/api/test_tool_data.py -test/api/test_workflow_extraction.py -test/api/test_workflows.py -test/api/test_workflows_from_yaml.py -test/base/ -test/casperjs/ -test/functional/ -test/integration/ -test/manual/ -test/shed_functional/base/twilltestcase.py -test/shed_functional/functional_tests.py -test/unit/datatypes/dataproviders/test_base_dataproviders.py 
-test/unit/datatypes/dataproviders/test_line_dataproviders.py -test/unit/managers/base.py -test/unit/managers/test_DatasetManager.py -test/unit/test_galaxy_mapping.py -test/unit/tools/test_actions.py -test/unit/workflows/test_run_parameters.py +test/ tool_list.py tools/data_source/ tools/evolution/ diff --git a/test/unit/managers/test_HDAManager.py b/test/unit/managers/test_HDAManager.py index ccfd19988bcb..cd7aa448d6ac 100644 --- a/test/unit/managers/test_HDAManager.py +++ b/test/unit/managers/test_HDAManager.py @@ -1,22 +1,20 @@ # -*- coding: utf-8 -*- -import os import imp +import os import unittest test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), '../unittest_utils/utility.py' ) ) import sqlalchemy +from six import string_types -from galaxy import model -from galaxy import exceptions - -from base import BaseTestCase - -from galaxy.managers.histories import HistoryManager -from galaxy.managers.datasets import DatasetManager +from galaxy import exceptions, model from galaxy.managers import hdas +from galaxy.managers.datasets import DatasetManager +from galaxy.managers.histories import HistoryManager +from .base import BaseTestCase # ============================================================================= default_password = '123456' @@ -429,11 +427,11 @@ def test_serializers( self ): self.assertIsInstance( serialized[ 'dataset' ], dict ) self.assertEncodedId( serialized[ 'dataset_id' ] ) self.assertUUID( serialized[ 'uuid' ] ) - self.assertIsInstance( serialized[ 'file_name' ], basestring ) - self.assertIsInstance( serialized[ 'extra_files_path' ], basestring ) + self.assertIsInstance( serialized[ 'file_name' ], string_types ) + self.assertIsInstance( serialized[ 'extra_files_path' ], string_types ) self.assertIsInstance( serialized[ 'size' ], int ) self.assertIsInstance( serialized[ 'file_size' ], int ) - self.assertIsInstance( serialized[ 'nice_size' ], basestring ) + self.assertIsInstance( serialized[ 'nice_size' ], string_types ) # TODO: these should be tested w/copy self.assertNullableEncodedId( serialized[ 'copied_from_history_dataset_association_id'] ) self.assertNullableEncodedId( serialized[ 'copied_from_library_dataset_dataset_association_id'] ) @@ -443,8 +441,8 @@ def test_serializers( self ): self.assertIsInstance( serialized[ 'meta_files' ], list ) self.assertNullableEncodedId( serialized[ 'parent_id'] ) self.assertEqual( serialized[ 'designation' ], None ) - self.assertIsInstance( serialized[ 'genome_build' ], basestring ) - self.assertIsInstance( serialized[ 'data_type' ], basestring ) + self.assertIsInstance( serialized[ 'genome_build' ], string_types ) + self.assertIsInstance( serialized[ 'data_type' ], string_types ) # hda self.assertEncodedId( serialized[ 'history_id' ] ) @@ -469,9 +467,9 @@ def test_serializers( self ): self.assertEqual( serialized[ 'api_type' ], 'file' ) self.assertEqual( serialized[ 'type' ], 'file' ) - self.assertIsInstance( serialized[ 'url' ], basestring ) + self.assertIsInstance( serialized[ 'url' ], string_types ) self.assertIsInstance( serialized[ 'urls' ], dict ) - self.assertIsInstance( serialized[ 'download_url' ], basestring ) + self.assertIsInstance( serialized[ 'download_url' ], string_types ) self.log( 'serialized should jsonify well' ) self.assertIsJsonifyable( serialized ) @@ -579,7 +577,7 @@ def test_deserialize_visible( self ): def test_deserialize_genome_build( self ): hda = self._create_vanilla_hda() - self.assertIsInstance( hda.dbkey, basestring ) + self.assertIsInstance( hda.dbkey, 
string_types ) self.log( 'should deserialize to "?" from None' ) self.hda_deserializer.deserialize( hda, { 'genome_build': None } ) self.assertEqual( hda.dbkey, '?' ) diff --git a/test/unit/managers/test_HDCAManager.py b/test/unit/managers/test_HDCAManager.py index 1e0956607983..cefd918b4721 100644 --- a/test/unit/managers/test_HDCAManager.py +++ b/test/unit/managers/test_HDCAManager.py @@ -6,8 +6,8 @@ test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), '../unittest_utils/utility.py' ) ) -from base import BaseTestCase -from base import CreatesCollectionsMixin +from .base import BaseTestCase +from .base import CreatesCollectionsMixin from galaxy.managers.histories import HistoryManager from galaxy.managers.datasets import DatasetManager @@ -72,7 +72,7 @@ def set_up_managers( self ): def test_views( self ): serializer = self.hdca_serializer item = self._create_list_hdca([ - dict( name=( "hda-{0}".format( i ) ), hid=i ) for i in xrange( 5 ) + dict( name=( "hda-{0}".format( i ) ), hid=i ) for i in range( 5 ) ]) self.log( 'should have a summary view' ) @@ -99,7 +99,7 @@ def test_views( self ): def test_views_and_keys( self ): serializer = self.hdca_serializer item = self._create_list_hdca([ - dict( name=( "hda-{0}".format( i ) ), hid=i ) for i in xrange( 5 ) + dict( name=( "hda-{0}".format( i ) ), hid=i ) for i in range( 5 ) ]) summary_plus_key = [ 'elements' ] only_keys = [ 'id', 'populated_state_message' ] diff --git a/test/unit/managers/test_HistoryContentsManager.py b/test/unit/managers/test_HistoryContentsManager.py index d35057364de5..57bbf646df27 100644 --- a/test/unit/managers/test_HistoryContentsManager.py +++ b/test/unit/managers/test_HistoryContentsManager.py @@ -1,28 +1,23 @@ # -*- coding: utf-8 -*- """ """ -import os +import datetime import imp -import unittest +import os import random -import datetime +import unittest test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), '../unittest_utils/utility.py' ) ) -from sqlalchemy import true -from sqlalchemy import false -from sqlalchemy import desc +from sqlalchemy import column, desc, false, true from sqlalchemy.sql import text -from sqlalchemy import column - -from base import BaseTestCase -from base import CreatesCollectionsMixin +from galaxy.managers import collections, hdas, history_contents from galaxy.managers.histories import HistoryManager -from galaxy.managers import hdas -from galaxy.managers import collections -from galaxy.managers import history_contents + +from .base import BaseTestCase +from .base import CreatesCollectionsMixin default_password = '123456' user2_data = dict( email='user2@user2.user2', username='user2', password=default_password ) @@ -62,10 +57,10 @@ def test_contents( self ): self.assertEqual( [], list( self.contents_manager.contents( history ) ) ) self.log( "calling contents on an history with hdas should return those in order of their hids" ) - hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ] + hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ] random.shuffle( hdas ) ordered_hda_contents = list( self.contents_manager.contents( history ) ) - self.assertEqual( map( lambda hda: hda.hid, ordered_hda_contents ), [ 1, 2, 3 ] ) + self.assertEqual( [hda.hid for hda in ordered_hda_contents], [ 1, 2, 3 ] ) self.log( "calling contents on an history with both hdas and collections should return both" ) hdca = self.add_list_collection_to_history( history, hdas ) @@ 
-80,7 +75,7 @@ def test_contained( self ): self.assertEqual( [], list( self.contents_manager.contained( history ) ) ) self.log( "calling contained on an history with both hdas and collections should return only hdas" ) - hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ] + hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ] self.add_list_collection_to_history( history, hdas ) self.assertEqual( list( self.contents_manager.contained( history ) ), hdas ) @@ -92,7 +87,7 @@ def test_subcontainers( self ): self.assertEqual( [], list( self.contents_manager.subcontainers( history ) ) ) self.log( "calling subcontainers on an history with both hdas and collections should return only collections" ) - hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ] + hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ] hdca = self.add_list_collection_to_history( history, hdas ) subcontainers = list( self.contents_manager.subcontainers( history ) ) self.assertEqual( subcontainers, [ hdca ] ) @@ -101,9 +96,9 @@ def test_limit_and_offset( self ): user2 = self.user_manager.create( **user2_data ) history = self.history_manager.create( name='history', user=user2 ) contents = [] - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]) contents.append( self.add_list_collection_to_history( history, contents[:3] ) ) - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 4, 6 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ]) contents.append( self.add_list_collection_to_history( history, contents[4:6] ) ) # _subquery = self.contents_manager._contents_common_query( self.contents_manager.subcontainer_class, history.id ) @@ -130,9 +125,9 @@ def test_orm_filtering( self ): user2 = self.user_manager.create( **user2_data ) history = self.history_manager.create( name='history', user=user2 ) contents = [] - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]) contents.append( self.add_list_collection_to_history( history, contents[:3] ) ) - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 4, 6 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ]) contents.append( self.add_list_collection_to_history( history, contents[4:6] ) ) self.log( "should allow filter on deleted" ) @@ -207,9 +202,9 @@ def test_order_by( self ): user2 = self.user_manager.create( **user2_data ) history = self.history_manager.create( name='history', user=user2 ) contents = [] - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]) contents.append( self.add_list_collection_to_history( history, contents[:3] ) ) - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 4, 6 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ]) 
contents.append( self.add_list_collection_to_history( history, contents[4:6] ) ) self.log( "should default to hid order_by" ) @@ -240,9 +235,9 @@ def test_update_time_filter( self ): user2 = self.user_manager.create( **user2_data ) history = self.history_manager.create( name='history', user=user2 ) contents = [] - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]) contents.append( self.add_list_collection_to_history( history, contents[:3] ) ) - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 4, 6 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ]) contents.append( self.add_list_collection_to_history( history, contents[4:6] ) ) self.log( "should allow filtering by update_time" ) @@ -264,9 +259,9 @@ def test_filtered_counting( self ): user2 = self.user_manager.create( **user2_data ) history = self.history_manager.create( name='history', user=user2 ) contents = [] - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]) contents.append( self.add_list_collection_to_history( history, contents[:3] ) ) - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 4, 6 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ]) contents.append( self.add_list_collection_to_history( history, contents[4:6] ) ) self.log( "should show correct count with filters" ) @@ -292,9 +287,9 @@ def test_type_id( self ): user2 = self.user_manager.create( **user2_data ) history = self.history_manager.create( name='history', user=user2 ) contents = [] - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 3 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]) contents.append( self.add_list_collection_to_history( history, contents[:3] ) ) - contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in xrange( 4, 6 ) ]) + contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ]) contents.append( self.add_list_collection_to_history( history, contents[4:6] ) ) self.log( "should be able to use eq and in with hybrid type_id" ) diff --git a/test/unit/managers/test_HistoryManager.py b/test/unit/managers/test_HistoryManager.py index b6d36a6eecf5..f9d081011e72 100644 --- a/test/unit/managers/test_HistoryManager.py +++ b/test/unit/managers/test_HistoryManager.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- """ """ -import os import imp +import os import unittest test_utils = imp.load_source( 'test_utils', @@ -10,19 +10,15 @@ import galaxy_mock import sqlalchemy +from six import string_types from sqlalchemy import true -from galaxy import model -from galaxy import exceptions - -from base import BaseTestCase +from galaxy import exceptions, model +from galaxy.managers import base, hdas +from galaxy.managers.histories import (HistoryDeserializer, HistoryFilters, + HistoryManager, HistorySerializer) -from galaxy.managers import base -from galaxy.managers.histories import HistoryManager -from galaxy.managers.histories import HistorySerializer -from 
galaxy.managers.histories import HistoryDeserializer -from galaxy.managers.histories import HistoryFilters -from galaxy.managers import hdas +from .base import BaseTestCase default_password = '123456' user2_data = dict( email='user2@user2.user2', username='user2', password=default_password ) @@ -260,7 +256,7 @@ def test_sharable( self ): len( self.history_manager.get_share_assocs( item1 ) ), 1 ) self.assertEqual( len( self.history_manager.get_share_assocs( item1, user=non_owner ) ), 1 ) - self.assertIsInstance( item1.slug, basestring ) + self.assertIsInstance( item1.slug, string_types ) self.log( "should be able to unshare with specific users" ) share_assoc = self.history_manager.unshare_with( item1, non_owner ) @@ -491,7 +487,7 @@ def test_history_serializers( self ): self.log( 'everything serialized should be of the proper type' ) self.assertIsInstance( serialized[ 'size' ], int ) - self.assertIsInstance( serialized[ 'nice_size' ], basestring ) + self.assertIsInstance( serialized[ 'nice_size' ], string_types ) self.log( 'serialized should jsonify well' ) self.assertIsJsonifyable( serialized ) @@ -560,7 +556,7 @@ def test_contents( self ): self.assertEqual( serialized[ 'state_details' ][ 'ok' ], 1 ) self.assertIsInstance( serialized[ 'state_ids' ][ 'ok' ], list ) self.assertIsInstance( serialized[ 'hdas' ], list ) - self.assertIsInstance( serialized[ 'hdas' ][0], basestring ) + self.assertIsInstance( serialized[ 'hdas' ][0], string_types ) serialized = self.history_serializer.serialize( history1, [ 'contents' ] ) self.assertHasKeys( serialized[ 'contents' ][0], [ 'id', 'name', 'peek', 'create_time' ]) diff --git a/test/unit/managers/test_UserManager.py b/test/unit/managers/test_UserManager.py index a4769101ac54..6e180cd6be6d 100644 --- a/test/unit/managers/test_UserManager.py +++ b/test/unit/managers/test_UserManager.py @@ -1,21 +1,20 @@ # -*- coding: utf-8 -*- """ """ -import os import imp +import os import unittest test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), '../unittest_utils/utility.py' ) ) import sqlalchemy +from six import string_types -from galaxy import model -from galaxy import exceptions +from galaxy import exceptions, model +from galaxy.managers import histories, users -from base import BaseTestCase -from galaxy.managers import users -from galaxy.managers import histories +from .base import BaseTestCase # ============================================================================= @@ -114,7 +113,7 @@ def test_api_keys( self ): self.log( "should be able to generate and retrieve valid api key" ) user2_api_key = self.user_manager.create_api_key( user2 ) - self.assertIsInstance( user2_api_key, basestring ) + self.assertIsInstance( user2_api_key, string_types ) self.assertEqual( self.user_manager.valid_api_key( user2 ).key, user2_api_key ) self.log( "should return the most recent (i.e. 
most valid) api key" ) @@ -179,7 +178,7 @@ def test_serializers( self ): # self.assertIsInstance( serialized[ 'active' ], bool ) self.assertIsInstance( serialized[ 'is_admin' ], bool ) self.assertIsInstance( serialized[ 'total_disk_usage' ], float ) - self.assertIsInstance( serialized[ 'nice_total_disk_usage' ], basestring ) + self.assertIsInstance( serialized[ 'nice_total_disk_usage' ], string_types ) self.assertIsInstance( serialized[ 'quota_percent' ], ( type( None ), float ) ) self.assertIsInstance( serialized[ 'tags_used' ], list ) self.assertIsInstance( serialized[ 'has_requests' ], bool ) @@ -209,7 +208,7 @@ def test_anonymous( self ): self.assertEqual( serialized[ 'id' ], None ) self.log( 'everything serialized should be of the proper type' ) self.assertIsInstance( serialized[ 'total_disk_usage' ], float ) - self.assertIsInstance( serialized[ 'nice_total_disk_usage' ], basestring ) + self.assertIsInstance( serialized[ 'nice_total_disk_usage' ], string_types ) self.assertIsInstance( serialized[ 'quota_percent' ], ( type( None ), float ) ) self.log( 'serialized should jsonify well' ) diff --git a/test/unit/tools/test_toolbox.py b/test/unit/tools/test_toolbox.py index 5e784a211721..ec4e004d849a 100644 --- a/test/unit/tools/test_toolbox.py +++ b/test/unit/tools/test_toolbox.py @@ -3,10 +3,12 @@ import string import unittest -from galaxy.tools import ToolBox +from six import string_types + from galaxy import model from galaxy.model import tool_shed_install from galaxy.model.tool_shed_install import mapping +from galaxy.tools import ToolBox import tools_support import routes @@ -122,7 +124,7 @@ def _add_config( self, content, name="tool_conf.xml" ): is_json = name.endswith(".json") path = self._tool_conf_path( name=name ) with open( path, "w" ) as f: - if not is_json or isinstance(content, basestring): + if not is_json or isinstance(content, string_types): f.write( content ) else: json.dump(content, f) @@ -239,7 +241,7 @@ def test_groups_tools_in_section( self ): # Assert only newer version of the tool loaded into the panel. 
section = self.toolbox._tool_panel["tid"] assert len(section.elems) == 1 - assert section.elems.values()[0].id == "github.com/galaxyproject/example/test_tool/0.2" + assert next(iter(section.elems.values())).id == "github.com/galaxyproject/example/test_tool/0.2" def test_group_tools_out_of_section( self ): self._init_tool() @@ -294,7 +296,7 @@ def test_workflow_in_panel( self ): encoded_id = self.app.security.encode_id( stored_workflow.id ) self._add_config( """""" % encoded_id ) assert len( self.toolbox._tool_panel ) == 1 - panel_workflow = self.toolbox._tool_panel.values()[ 0 ] + panel_workflow = next(iter(self.toolbox._tool_panel.values())) assert panel_workflow == stored_workflow.latest_workflow # TODO: test to_dict with workflows @@ -305,7 +307,7 @@ def test_workflow_in_section( self ): assert len( self.toolbox._tool_panel ) == 1 section = self.toolbox._tool_panel[ 'tid' ] assert len( section.elems ) == 1 - panel_workflow = section.elems.values()[ 0 ] + panel_workflow = next(iter(section.elems.values())) assert panel_workflow == stored_workflow.latest_workflow def test_label_in_panel( self ): @@ -334,8 +336,8 @@ def _init_tool_in_section( self, json=False ): self._add_config({"items": [section]}, name="tool_conf.json") def __check_test_labels( self, panel_dict ): - assert panel_dict.keys() == ["label_lab1", "label_lab2"] - label1 = panel_dict.values()[ 0 ] + assert list(panel_dict.keys()) == ["label_lab1", "label_lab2"] + label1 = next(iter(panel_dict.values())) assert label1.id == "lab1" assert label1.text == "Label 1" diff --git a/test/unit/visualizations/plugins/test_VisualizationPlugin.py b/test/unit/visualizations/plugins/test_VisualizationPlugin.py index 766bbd8ef7e6..ad418f7180c6 100644 --- a/test/unit/visualizations/plugins/test_VisualizationPlugin.py +++ b/test/unit/visualizations/plugins/test_VisualizationPlugin.py @@ -1,10 +1,12 @@ """ Test lib/galaxy/visualization/plugins/plugin. """ -import os import imp +import os import unittest +from six import string_types + test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), os.pardir, os.pardir, 'unittest_utils', 'utility.py' ) ) import galaxy_mock @@ -187,7 +189,7 @@ def test_render( self ): plugin.template_lookup = plugin._build_template_lookup( mock_app_dir.root_path ) response = plugin.render( trans=galaxy_mock.MockTrans( app=mock_app ) ) - self.assertIsInstance( response, basestring ) + self.assertIsInstance( response, string_types ) self.assertEqual( response.strip(), "True" ) diff --git a/test/unit/visualizations/plugins/test_VisualizationsRegistry.py b/test/unit/visualizations/plugins/test_VisualizationsRegistry.py index 1589b99f426b..3e1b9bb12e3a 100644 --- a/test/unit/visualizations/plugins/test_VisualizationsRegistry.py +++ b/test/unit/visualizations/plugins/test_VisualizationsRegistry.py @@ -1,17 +1,19 @@ """ Test lib/galaxy/visualization/plugins/registry. 
""" -import os import imp +import os import re +from six import string_types + test_utils = imp.load_source( 'test_utils', os.path.join( os.path.dirname( __file__), os.pardir, os.pardir, 'unittest_utils', 'utility.py' ) ) import galaxy_mock from galaxy import model -from galaxy.visualization.plugins.registry import VisualizationsRegistry from galaxy.visualization.plugins import plugin +from galaxy.visualization.plugins.registry import VisualizationsRegistry # ----------------------------------------------------------------------------- glx_dir = test_utils.get_galaxy_root() @@ -50,7 +52,7 @@ def test_plugin_load_from_repo( self ): expected_plugins_path = os.path.join( glx_dir, vis_reg_path ) self.assertEqual( plugin_mgr.base_url, 'visualizations' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) scatterplot = plugin_mgr.plugins[ 'scatterplot' ] self.assertEqual( scatterplot.name, 'scatterplot' ) @@ -116,8 +118,8 @@ def test_plugin_load( self ): expected_plugin_names = [ 'vis1', 'vis2' ] self.assertEqual( plugin_mgr.base_url, 'visualizations' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), expected_plugin_names ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), expected_plugin_names ) vis1 = plugin_mgr.plugins[ 'vis1' ] self.assertEqual( vis1.name, 'vis1' ) @@ -191,8 +193,8 @@ def test_interactive_environ_plugin_load( self ): expected_plugin_names = [ 'ipython' ] self.assertEqual( plugin_mgr.base_url, 'visualizations' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), expected_plugin_names ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), expected_plugin_names ) ipython = plugin_mgr.plugins[ 'ipython' ] config = ipython.config @@ -209,7 +211,7 @@ def test_interactive_environ_plugin_load( self ): # should return the (new) api key for the above user (see the template above) response = ipython._render( {}, trans=trans ) response.strip() - self.assertIsInstance( response, basestring ) + self.assertIsInstance( response, string_types ) self.assertTrue( '-' in response ) ie_request, api_key = response.split( '-' ) diff --git a/test/unit/web/base/test_HookPluginManager.py b/test/unit/web/base/test_HookPluginManager.py index 5baad46fa803..9fae2144353f 100644 --- a/test/unit/web/base/test_HookPluginManager.py +++ b/test/unit/web/base/test_HookPluginManager.py @@ -87,8 +87,8 @@ def test_loading_point( self ): app_path = mock_app_dir.root_path expected_plugins_path = os.path.join( app_path, 'plugins' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) plugin = plugin_mgr.plugins[ 'plugin1' ] self.assertEqual( plugin.name, 'plugin1' ) @@ -114,7 +114,7 @@ def test_bad_loading_points( self ): app_path = mock_app_dir.root_path expected_plugins_path = os.path.join( app_path, 'plugins' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) self.assertEqual( 
plugin_mgr.plugins.keys(), [] ) mock_app_dir.remove() @@ -134,7 +134,7 @@ def test_bad_import( self ): app_path = mock_app_dir.root_path expected_plugins_path = os.path.join( app_path, 'plugins' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) self.assertEqual( plugin_mgr.plugins.keys(), [] ) mock_app_dir.remove() @@ -155,8 +155,8 @@ def test_import_w_rel_import( self ): app_path = mock_app_dir.root_path expected_plugins_path = os.path.join( app_path, 'plugins' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) plugin = plugin_mgr.plugins[ 'plugin1' ] self.assertEqual( plugin.name, 'plugin1' ) @@ -181,8 +181,8 @@ def test_import_w_galaxy_import( self ): app_path = mock_app_dir.root_path expected_plugins_path = os.path.join( app_path, 'plugins' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) plugin = plugin_mgr.plugins[ 'plugin1' ] self.assertEqual( plugin.name, 'plugin1' ) @@ -207,7 +207,7 @@ def test_run_hooks( self ): }) mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path ) plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2' ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2' ] ) return_val_dict = plugin_mgr.run_hook( 'blah', 'one two check' ) self.assertEqual( return_val_dict, { 'plugin1': 'One Two Check', 'plugin2': 'ONE TWO CHECK' } ) @@ -234,7 +234,7 @@ def test_hook_errs( self ): }) mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path ) plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3' ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2', 'plugin3' ] ) return_val_dict = plugin_mgr.run_hook( 'blah', 'one two check' ) self.assertEqual( return_val_dict, { 'plugin1': 'One Two Check', 'plugin2': 'ONE TWO CHECK' } ) diff --git a/test/unit/web/base/test_PageServingPluginManager.py b/test/unit/web/base/test_PageServingPluginManager.py index 816927578f20..f599c8ee25e2 100644 --- a/test/unit/web/base/test_PageServingPluginManager.py +++ b/test/unit/web/base/test_PageServingPluginManager.py @@ -45,8 +45,8 @@ def test_plugin_load( self ): expected_plugins_path = os.path.join( app_path, 'plugins' ) self.assertEqual( plugin_mgr.base_url, 'test' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3' ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2', 'plugin3' ] ) plugin1 = plugin_mgr.plugins[ 'plugin1' ] self.assertEqual( plugin1.name, 'plugin1' ) @@ -92,7 +92,7 @@ def test_plugin_static_map( self ): mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path ) plugin_mgr = PageServingPluginManager( mock_app, 
'test', directories_setting='plugins' ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) + self.assertEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) plugin = plugin_mgr.plugins[ 'plugin1' ] self.assertEqual( plugin_mgr.get_static_urls_and_paths(), [( plugin.static_url, plugin.static_path )] ) @@ -112,7 +112,7 @@ def test_plugin_templates( self ): mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path ) plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) + self.assertEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] ) plugin = plugin_mgr.plugins[ 'plugin1' ] rendered = plugin_mgr.fill_template( galaxy_mock.MockTrans(), plugin, 'test.mako', what='Hey', you='Ho', say='HeyHey HoHo' ) diff --git a/test/unit/web/base/test_PluginManager.py b/test/unit/web/base/test_PluginManager.py index 381288dbba16..815493fe43f0 100644 --- a/test/unit/web/base/test_PluginManager.py +++ b/test/unit/web/base/test_PluginManager.py @@ -32,8 +32,8 @@ def test_rel_path_search( self ): app_path = mock_app_dir.root_path expected_plugins_path = os.path.join( app_path, 'plugins' ) - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2' ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2' ] ) self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' ) self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) ) self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' ) @@ -53,8 +53,8 @@ def test_abs_path_search( self ): plugin_mgr = PluginManager( mock_app, directories_setting=mock_plugin_dir.root_path ) expected_plugins_path = mock_plugin_dir.root_path - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2' ] ) + self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2' ] ) self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' ) self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) ) self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' ) @@ -82,8 +82,8 @@ def test_multiple_dirs( self ): expected_plugins_rel_path = os.path.join( app_path, 'plugins' ) expected_plugins_abs_path = mock_abs_plugin_dir.root_path - self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_rel_path, expected_plugins_abs_path ] ) - self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3', 'plugin4' ] ) + self.assertEqual( sorted(plugin_mgr.directories), sorted([ expected_plugins_rel_path, expected_plugins_abs_path ]) ) + self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2', 'plugin3', 'plugin4' ] ) self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' ) self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_rel_path, 'plugin1' ) ) self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' ) From fb116f5715d9cd291d2fdf956b5a3a91b3e84296 Mon Sep 17 00:00:00 2001 From: Anthony Bretaudeau Date: Wed, 29 Jun 2016 14:04:43 +0200 Subject: [PATCH 65/86] fix missing element_identifier with data_collection 
input param --- lib/galaxy/tools/wrappers.py | 4 +-- test/api/test_tools.py | 9 +++--- .../tools/identifier_conditional.xml | 15 ++++++++++ .../identifier_multiple_in_conditional.xml | 28 ------------------- test/functional/tools/samples_tool_conf.xml | 2 +- 5 files changed, 22 insertions(+), 36 deletions(-) create mode 100644 test/functional/tools/identifier_conditional.xml delete mode 100644 test/functional/tools/identifier_multiple_in_conditional.xml diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py index 0acf136163aa..885702d04b3d 100644 --- a/lib/galaxy/tools/wrappers.py +++ b/lib/galaxy/tools/wrappers.py @@ -307,7 +307,7 @@ def to_wrapper( dataset ): element = dataset dataset = element.dataset_instance kwargs["identifier"] = element.element_identifier - return self._dataset_wrapper( dataset, dataset_paths, **kwargs ) + return self._dataset_wrapper( dataset, dataset_paths, identifier = element_identifier, **kwargs ) list.__init__( self, map( to_wrapper, datasets ) ) self.job_working_directory = job_working_directory @@ -365,7 +365,7 @@ def __init__( self, job_working_directory, has_collection, dataset_paths=[], **k if dataset_collection_element.is_collection: element_wrapper = DatasetCollectionWrapper(job_working_directory, dataset_collection_element, dataset_paths, **kwargs ) else: - element_wrapper = self._dataset_wrapper( element_object, dataset_paths, **kwargs) + element_wrapper = self._dataset_wrapper( element_object, dataset_paths, identifier=element_identifier, **kwargs) element_instances[element_identifier] = element_wrapper element_instance_list.append( element_wrapper ) diff --git a/test/api/test_tools.py b/test/api/test_tools.py index 535dc56f182b..7c0f68b1d2c7 100644 --- a/test/api/test_tools.py +++ b/test/api/test_tools.py @@ -806,8 +806,8 @@ def test_identifier_with_multiple_normal_datasets( self ): output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 ) self.assertEquals( output1_content.strip(), "Pasted Entry\nPasted Entry" ) - @skip_without_tool( "identifier_multiple_in_conditional" ) - def test_identifier_with_multiple_normal_datasets_in_conditional( self ): + @skip_without_tool( "identifier_collection" ) + def test_identifier_with_data_collection( self ): history_id = self.dataset_populator.new_history() element_identifiers = self.dataset_collection_populator.list_identifiers( history_id ) @@ -823,12 +823,11 @@ def test_identifier_with_multiple_normal_datasets_in_conditional( self ): dataset_collection = create_response.json() inputs = { - "options|choice": "one", - "options|input1": {'src': 'hdca', 'id': dataset_collection['id']}, + "input1": {'src': 'hdca', 'id': dataset_collection['id']}, } self.dataset_populator.wait_for_history( history_id, assert_ok=True ) - create_response = self._run( "identifier_multiple_in_conditional", history_id, inputs ) + create_response = self._run( "identifier_conditional", history_id, inputs ) self._assert_status_code_is( create_response, 200 ) create = create_response.json() outputs = create[ 'outputs' ] diff --git a/test/functional/tools/identifier_conditional.xml b/test/functional/tools/identifier_conditional.xml new file mode 100644 index 000000000000..ff548d679f61 --- /dev/null +++ b/test/functional/tools/identifier_conditional.xml @@ -0,0 +1,15 @@ + + + #for $input in $input1: + echo '$input.element_identifier' >> 'output1'; + #end for + + + + + + + + + + diff --git a/test/functional/tools/identifier_multiple_in_conditional.xml
b/test/functional/tools/identifier_multiple_in_conditional.xml deleted file mode 100644 index ff02e2840557..000000000000 --- a/test/functional/tools/identifier_multiple_in_conditional.xml +++ /dev/null @@ -1,28 +0,0 @@ - - - #if $options.choice == "one": - #for $input in $options.input1: - echo '$input.element_identifier' >> 'output1'; - #end for - #end if - - - - - - - - - - - - - - - - - - - - - diff --git a/test/functional/tools/samples_tool_conf.xml b/test/functional/tools/samples_tool_conf.xml index 16ed1fecf649..112aa9da8c8d 100644 --- a/test/functional/tools/samples_tool_conf.xml +++ b/test/functional/tools/samples_tool_conf.xml @@ -64,7 +64,7 @@ - + From cf29b1dcd60c8dcc39cfc7e507572f08ff86347d Mon Sep 17 00:00:00 2001 From: Anthony Bretaudeau Date: Wed, 29 Jun 2016 15:20:22 +0200 Subject: [PATCH 66/86] apply comments --- lib/galaxy/tools/wrappers.py | 2 +- test/api/test_tools.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py index 885702d04b3d..6d203ee38602 100644 --- a/lib/galaxy/tools/wrappers.py +++ b/lib/galaxy/tools/wrappers.py @@ -307,7 +307,7 @@ def to_wrapper( dataset ): element = dataset dataset = element.dataset_instance kwargs["identifier"] = element.element_identifier - return self._dataset_wrapper( dataset, dataset_paths, identifier = element_identifier, **kwargs ) + return self._dataset_wrapper( dataset, dataset_paths, **kwargs ) list.__init__( self, map( to_wrapper, datasets ) ) self.job_working_directory = job_working_directory diff --git a/test/api/test_tools.py b/test/api/test_tools.py index 7c0f68b1d2c7..eb3a0808525b 100644 --- a/test/api/test_tools.py +++ b/test/api/test_tools.py @@ -832,7 +832,6 @@ def test_identifier_with_data_collection( self ): create = create_response.json() outputs = create[ 'outputs' ] jobs = create[ 'jobs' ] - implicit_collections = create[ 'implicit_collections' ] self.assertEquals( len( jobs ), 1 ) self.assertEquals( len( outputs ), 1 ) output1 = outputs[ 0 ] From d9e3d69b78b863823302d59af9bf0eeeb5cba1e8 Mon Sep 17 00:00:00 2001 From: Ben Fulton Date: Wed, 29 Jun 2016 10:57:00 -0400 Subject: [PATCH 67/86] Add password_expiration_period to toolshed config This is necessary as it shares the user class with the galaxy app. 
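A note on the change below: both config classes now parse this option with the same one-liner, timedelta(days=int(kwargs.get("password_expiration_period", 0))). Because timedelta(0) is falsy in Python, a configured value of 0 disables the feature naturally: consumers can gate expiration logic on a plain truth test of the config attribute. A minimal sketch of that pattern (the parse_expiration helper name is illustrative, not part of the patch):

    from datetime import timedelta

    def parse_expiration(kwargs):
        # Values arrive from the .ini file as strings; the default of 0 yields
        # timedelta(0), which is falsy, so callers can simply truth-test the
        # resulting config attribute to decide whether expiration is enabled.
        return timedelta(days=int(kwargs.get("password_expiration_period", 0)))

    assert not parse_expiration({})                                # disabled by default
    assert parse_expiration({"password_expiration_period": "90"})  # 90-day expiry
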
--- lib/galaxy/webapps/tool_shed/config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/galaxy/webapps/tool_shed/config.py b/lib/galaxy/webapps/tool_shed/config.py index d4159638e8b7..bb9d61571ce5 100644 --- a/lib/galaxy/webapps/tool_shed/config.py +++ b/lib/galaxy/webapps/tool_shed/config.py @@ -7,6 +7,7 @@ import logging import logging.config import ConfigParser +from datetime import timedelta from galaxy.util import string_as_bool from galaxy.web.formatting import expand_pretty_datetime_format from galaxy.version import VERSION, VERSION_MAJOR @@ -148,6 +149,7 @@ def __init__( self, **kwargs ): self.citation_cache_type = kwargs.get( "citation_cache_type", "file" ) self.citation_cache_data_dir = resolve_path( kwargs.get( "citation_cache_data_dir", "database/tool_shed_citations/data" ), self.root ) self.citation_cache_lock_dir = resolve_path( kwargs.get( "citation_cache_lock_dir", "database/tool_shed_citations/locks" ), self.root ) + self.password_expiration_period = timedelta(days=int(kwargs.get("password_expiration_period", 0))) @property def shed_tool_data_path( self ): From fe22e1ec44142980d97c3a81391f654e427d50d0 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Wed, 29 Jun 2016 16:30:35 +0100 Subject: [PATCH 68/86] Adds the ability to use the Kubernetes runner with Galaxy being executed inside Kubernetes (uses service account authentication). --- config/job_conf.xml.sample_advanced | 9 +++++++-- lib/galaxy/jobs/runners/kubernetes.py | 6 ++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/config/job_conf.xml.sample_advanced b/config/job_conf.xml.sample_advanced index cf9cab73d804..d90cf920cafa 100644 --- a/config/job_conf.xml.sample_advanced +++ b/config/job_conf.xml.sample_advanced @@ -174,9 +174,14 @@ --> /path/to/kubeconfig - + etc. This parameter is not necessary and ignored if k8s_use_service_account is set to True --> + + false + galaxy_pvc
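The kubernetes.py half of this patch is not visible in this excerpt, but the job_conf.xml sample above shows the intent: when k8s_use_service_account is true, the runner should authenticate with the pod's own service-account credentials instead of a kubeconfig file. Below is a rough sketch of that selection logic using pykube, the client library the runner uses; the make_pykube_client helper and the parameter plumbing are assumptions, while KubeConfig.from_service_account() and KubeConfig.from_file() are existing pykube constructors:

    from pykube.config import KubeConfig
    from pykube.http import HTTPClient

    def make_pykube_client(runner_params):
        # Assumes the XML params have already been parsed (booleans to bool).
        if runner_params.get("k8s_use_service_account", False):
            # In-cluster: use the service-account token Kubernetes mounts at
            # /var/run/secrets/kubernetes.io/serviceaccount inside the pod.
            config = KubeConfig.from_service_account()
        else:
            # Out-of-cluster: fall back to the kubeconfig file configured via
            # k8s_config_path in job_conf.xml.
            config = KubeConfig.from_file(runner_params["k8s_config_path"])
        return HTTPClient(config)
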

          \n\t*

          Sparkline:

          \n\t* $('.sparkline').sparkline();\n\t*\n\t* For line charts, x values can also be specified:\n\t*

          Sparkline: 1:1,2.7:4,3.4:6,5:6,6:8,8.7:5,9:3,10:5

          \n\t* $('#sparkline1').sparkline([ [1,1], [2.7,4], [3.4,6], [5,6], [6,8], [8.7,5], [9,3], [10,5] ])\n\t*\n\t* By default, options should be passed in as teh second argument to the sparkline function:\n\t* $('.sparkline').sparkline([1,2,3,4], {type: 'bar'})\n\t*\n\t* Options can also be set by passing them on the tag itself. This feature is disabled by default though\n\t* as there's a slight performance overhead:\n\t* $('.sparkline').sparkline([1,2,3,4], {enableTagOptions: true})\n\t*

          Sparkline: loading

          \n\t* Prefix all options supplied as tag attribute with \"spark\" (configurable by setting tagOptionPrefix)\n\t*\n\t* Supported options:\n\t* lineColor - Color of the line used for the chart\n\t* fillColor - Color used to fill in the chart - Set to '' or false for a transparent chart\n\t* width - Width of the chart - Defaults to 3 times the number of values in pixels\n\t* height - Height of the chart - Defaults to the height of the containing element\n\t* chartRangeMin - Specify the minimum value to use for the Y range of the chart - Defaults to the minimum value supplied\n\t* chartRangeMax - Specify the maximum value to use for the Y range of the chart - Defaults to the maximum value supplied\n\t* chartRangeClip - Clip out of range values to the max/min specified by chartRangeMin and chartRangeMax\n\t* chartRangeMinX - Specify the minimum value to use for the X range of the chart - Defaults to the minimum value supplied\n\t* chartRangeMaxX - Specify the maximum value to use for the X range of the chart - Defaults to the maximum value supplied\n\t* composite - If true then don't erase any existing chart attached to the tag, but draw\n\t* another chart over the top - Note that width and height are ignored if an\n\t* existing chart is detected.\n\t* tagValuesAttribute - Name of tag attribute to check for data values - Defaults to 'values'\n\t* enableTagOptions - Whether to check tags for sparkline options\n\t* tagOptionPrefix - Prefix used for options supplied as tag attributes - Defaults to 'spark'\n\t* disableHiddenCheck - If set to true, then the plugin will assume that charts will never be drawn into a\n\t* hidden dom element, avoding a browser reflow\n\t* disableInteraction - If set to true then all mouseover/click interaction behaviour will be disabled,\n\t* making the plugin perform much like it did in 1.x\n\t* disableTooltips - If set to true then tooltips will be disabled - Defaults to false (tooltips enabled)\n\t* disableHighlight - If set to true then highlighting of selected chart elements on mouseover will be disabled\n\t* defaults to false (highlights enabled)\n\t* highlightLighten - Factor to lighten/darken highlighted chart values by - Defaults to 1.4 for a 40% increase\n\t* tooltipContainer - Specify which DOM element the tooltip should be rendered into - defaults to document.body\n\t* tooltipClassname - Optional CSS classname to apply to tooltips - If not specified then a default style will be applied\n\t* tooltipOffsetX - How many pixels away from the mouse pointer to render the tooltip on the X axis\n\t* tooltipOffsetY - How many pixels away from the mouse pointer to render the tooltip on the r axis\n\t* tooltipFormatter - Optional callback that allows you to override the HTML displayed in the tooltip\n\t* callback is given arguments of (sparkline, options, fields)\n\t* tooltipChartTitle - If specified then the tooltip uses the string specified by this setting as a title\n\t* tooltipFormat - A format string or SPFormat object (or an array thereof for multiple entries)\n\t* to control the format of the tooltip\n\t* tooltipPrefix - A string to prepend to each field displayed in a tooltip\n\t* tooltipSuffix - A string to append to each field displayed in a tooltip\n\t* tooltipSkipNull - If true then null values will not have a tooltip displayed (defaults to true)\n\t* tooltipValueLookups - An object or range map to map field values to tooltip strings\n\t* (eg. 
to map -1 to \"Lost\", 0 to \"Draw\", and 1 to \"Win\")\n\t* numberFormatter - Optional callback for formatting numbers in tooltips\n\t* numberDigitGroupSep - Character to use for group separator in numbers \"1,234\" - Defaults to \",\"\n\t* numberDecimalMark - Character to use for the decimal point when formatting numbers - Defaults to \".\"\n\t* numberDigitGroupCount - Number of digits between group separator - Defaults to 3\n\t*\n\t* There are 7 types of sparkline, selected by supplying a \"type\" option of 'line' (default),\n\t* 'bar', 'tristate', 'bullet', 'discrete', 'pie' or 'box'\n\t* line - Line chart. Options:\n\t* spotColor - Set to '' to not end each line in a circular spot\n\t* minSpotColor - If set, color of spot at minimum value\n\t* maxSpotColor - If set, color of spot at maximum value\n\t* spotRadius - Radius in pixels\n\t* lineWidth - Width of line in pixels\n\t* normalRangeMin\n\t* normalRangeMax - If set draws a filled horizontal bar between these two values marking the \"normal\"\n\t* or expected range of values\n\t* normalRangeColor - Color to use for the above bar\n\t* drawNormalOnTop - Draw the normal range above the chart fill color if true\n\t* defaultPixelsPerValue - Defaults to 3 pixels of width for each value in the chart\n\t* highlightSpotColor - The color to use for drawing a highlight spot on mouseover - Set to null to disable\n\t* highlightLineColor - The color to use for drawing a highlight line on mouseover - Set to null to disable\n\t* valueSpots - Specify which points to draw spots on, and in which color. Accepts a range map\n\t*\n\t* bar - Bar chart. Options:\n\t* barColor - Color of bars for postive values\n\t* negBarColor - Color of bars for negative values\n\t* zeroColor - Color of bars with zero values\n\t* nullColor - Color of bars with null values - Defaults to omitting the bar entirely\n\t* barWidth - Width of bars in pixels\n\t* colorMap - Optional mappnig of values to colors to override the *BarColor values above\n\t* can be an Array of values to control the color of individual bars or a range map\n\t* to specify colors for individual ranges of values\n\t* barSpacing - Gap between bars in pixels\n\t* zeroAxis - Centers the y-axis around zero if true\n\t*\n\t* tristate - Charts values of win (>0), lose (<0) or draw (=0)\n\t* posBarColor - Color of win values\n\t* negBarColor - Color of lose values\n\t* zeroBarColor - Color of draw values\n\t* barWidth - Width of bars in pixels\n\t* barSpacing - Gap between bars in pixels\n\t* colorMap - Optional mappnig of values to colors to override the *BarColor values above\n\t* can be an Array of values to control the color of individual bars or a range map\n\t* to specify colors for individual ranges of values\n\t*\n\t* discrete - Options:\n\t* lineHeight - Height of each line in pixels - Defaults to 30% of the graph height\n\t* thesholdValue - Values less than this value will be drawn using thresholdColor instead of lineColor\n\t* thresholdColor\n\t*\n\t* bullet - Values for bullet graphs msut be in the order: target, performance, range1, range2, range3, ...\n\t* options:\n\t* targetColor - The color of the vertical target marker\n\t* targetWidth - The width of the target marker in pixels\n\t* performanceColor - The color of the performance measure horizontal bar\n\t* rangeColors - Colors to use for each qualitative range background color\n\t*\n\t* pie - Pie chart. 
Options:\n\t* sliceColors - An array of colors to use for pie slices\n\t* offset - Angle in degrees to offset the first slice - Try -90 or +90\n\t* borderWidth - Width of border to draw around the pie chart, in pixels - Defaults to 0 (no border)\n\t* borderColor - Color to use for the pie chart border - Defaults to #000\n\t*\n\t* box - Box plot. Options:\n\t* raw - Set to true to supply pre-computed plot points as values\n\t* values should be: low_outlier, low_whisker, q1, median, q3, high_whisker, high_outlier\n\t* When set to false you can supply any number of values and the box plot will\n\t* be computed for you. Default is false.\n\t* showOutliers - Set to true (default) to display outliers as circles\n\t* outlierIQR - Interquartile range used to determine outliers. Default 1.5\n\t* boxLineColor - Outline color of the box\n\t* boxFillColor - Fill color for the box\n\t* whiskerColor - Line color used for whiskers\n\t* outlierLineColor - Outline color of outlier circles\n\t* outlierFillColor - Fill color of the outlier circles\n\t* spotRadius - Radius of outlier circles\n\t* medianColor - Line color of the median line\n\t* target - Draw a target cross hair at the supplied value (default undefined)\n\t*\n\t*\n\t*\n\t* Examples:\n\t* $('#sparkline1').sparkline(myvalues, { lineColor: '#f00', fillColor: false });\n\t* $('.barsparks').sparkline('html', { type:'bar', height:'40px', barWidth:5 });\n\t* $('#tristate').sparkline([1,1,-1,1,0,0,-1], { type:'tristate' });\n\t* $('#discrete').sparkline([1,3,4,5,5,3,4,5], { type:'discrete' });\n\t* $('#bullet').sparkline([10,12,12,9,7], { type:'bullet' });\n\t* $('#pie').sparkline([1,1,2], { type:'pie' });\n\t*/\n\t\n\t/*jslint regexp: true, browser: true, jquery: true, white: true, nomen: false, plusplus: false, maxerr: 500, indent: 4 */\n\t\n\t(function(document, Math, undefined) { // performance/minified-size optimization\n\t(function(factory) {\n\t if(true) {\n\t !(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(1)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory), __WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ? 
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));\n\t } else if (jQuery && !jQuery.fn.sparkline) {\n\t factory(jQuery);\n\t }\n\t}\n\t(function($) {\n\t 'use strict';\n\t\n\t var UNSET_OPTION = {},\n\t getDefaults, createClass, SPFormat, clipval, quartile, normalizeValue, normalizeValues,\n\t remove, isNumber, all, sum, addCSS, ensureArray, formatNumber, RangeMap,\n\t MouseHandler, Tooltip, barHighlightMixin,\n\t line, bar, tristate, discrete, bullet, pie, box, defaultStyles, initStyles,\n\t VShape, VCanvas_base, VCanvas_canvas, VCanvas_vml, pending, shapeCount = 0;\n\t\n\t /**\n\t * Default configuration settings\n\t */\n\t getDefaults = function () {\n\t return {\n\t // Settings common to most/all chart types\n\t common: {\n\t type: 'line',\n\t lineColor: '#00f',\n\t fillColor: '#cdf',\n\t defaultPixelsPerValue: 3,\n\t width: 'auto',\n\t height: 'auto',\n\t composite: false,\n\t tagValuesAttribute: 'values',\n\t tagOptionsPrefix: 'spark',\n\t enableTagOptions: false,\n\t enableHighlight: true,\n\t highlightLighten: 1.4,\n\t tooltipSkipNull: true,\n\t tooltipPrefix: '',\n\t tooltipSuffix: '',\n\t disableHiddenCheck: false,\n\t numberFormatter: false,\n\t numberDigitGroupCount: 3,\n\t numberDigitGroupSep: ',',\n\t numberDecimalMark: '.',\n\t disableTooltips: false,\n\t disableInteraction: false\n\t },\n\t // Defaults for line charts\n\t line: {\n\t spotColor: '#f80',\n\t highlightSpotColor: '#5f5',\n\t highlightLineColor: '#f22',\n\t spotRadius: 1.5,\n\t minSpotColor: '#f80',\n\t maxSpotColor: '#f80',\n\t lineWidth: 1,\n\t normalRangeMin: undefined,\n\t normalRangeMax: undefined,\n\t normalRangeColor: '#ccc',\n\t drawNormalOnTop: false,\n\t chartRangeMin: undefined,\n\t chartRangeMax: undefined,\n\t chartRangeMinX: undefined,\n\t chartRangeMaxX: undefined,\n\t tooltipFormat: new SPFormat('<span style=\"color: {{color}}\">&#9679;</span> {{prefix}}{{y}}{{suffix}}')\n\t },\n\t // Defaults for bar charts\n\t bar: {\n\t barColor: '#3366cc',\n\t negBarColor: '#f44',\n\t stackedBarColor: ['#3366cc', '#dc3912', '#ff9900', '#109618', '#66aa00',\n\t '#dd4477', '#0099c6', '#990099'],\n\t zeroColor: undefined,\n\t nullColor: undefined,\n\t zeroAxis: true,\n\t barWidth: 4,\n\t barSpacing: 1,\n\t chartRangeMax: undefined,\n\t chartRangeMin: undefined,\n\t chartRangeClip: false,\n\t colorMap: undefined,\n\t tooltipFormat: new SPFormat('<span style=\"color: {{color}}\">&#9679;</span> {{prefix}}{{value}}{{suffix}}')\n\t },\n\t // Defaults for tristate charts\n\t tristate: {\n\t barWidth: 4,\n\t barSpacing: 1,\n\t posBarColor: '#6f6',\n\t negBarColor: '#f44',\n\t zeroBarColor: '#999',\n\t colorMap: {},\n\t tooltipFormat: new SPFormat('<span style=\"color: {{color}}\">&#9679;</span> {{value:map}}'),\n\t tooltipValueLookups: { map: { '-1': 'Loss', '0': 'Draw', '1': 'Win' } }\n\t },\n\t // Defaults for discrete charts\n\t discrete: {\n\t lineHeight: 'auto',\n\t thresholdColor: undefined,\n\t thresholdValue: 0,\n\t chartRangeMax: undefined,\n\t chartRangeMin: undefined,\n\t chartRangeClip: false,\n\t tooltipFormat: new SPFormat('{{prefix}}{{value}}{{suffix}}')\n\t },\n\t // Defaults for bullet charts\n\t bullet: {\n\t targetColor: '#f33',\n\t targetWidth: 3, // width of the target bar in pixels\n\t performanceColor: '#33f',\n\t rangeColors: ['#d3dafe', '#a8b6ff', '#7f94ff'],\n\t base: undefined, // set this to a number to change the base start number\n\t tooltipFormat: new SPFormat('{{fieldkey:fields}} - {{value}}'),\n\t tooltipValueLookups: { fields: {r: 'Range', p: 'Performance', t: 
'Target'} }\n\t },\n\t // Defaults for pie charts\n\t pie: {\n\t offset: 0,\n\t sliceColors: ['#3366cc', '#dc3912', '#ff9900', '#109618', '#66aa00',\n\t '#dd4477', '#0099c6', '#990099'],\n\t borderWidth: 0,\n\t borderColor: '#000',\n\t tooltipFormat: new SPFormat('<span style=\"color: {{color}}\">&#9679;</span> {{value}} ({{percent.1}}%)')\n\t },\n\t // Defaults for box plots\n\t box: {\n\t raw: false,\n\t boxLineColor: '#000',\n\t boxFillColor: '#cdf',\n\t whiskerColor: '#000',\n\t outlierLineColor: '#333',\n\t outlierFillColor: '#fff',\n\t medianColor: '#f00',\n\t showOutliers: true,\n\t outlierIQR: 1.5,\n\t spotRadius: 1.5,\n\t target: undefined,\n\t targetColor: '#4a2',\n\t chartRangeMax: undefined,\n\t chartRangeMin: undefined,\n\t tooltipFormat: new SPFormat('{{field:fields}}: {{value}}'),\n\t tooltipFormatFieldlistKey: 'field',\n\t tooltipValueLookups: { fields: { lq: 'Lower Quartile', med: 'Median',\n\t uq: 'Upper Quartile', lo: 'Left Outlier', ro: 'Right Outlier',\n\t lw: 'Left Whisker', rw: 'Right Whisker'} }\n\t }\n\t };\n\t };\n\t\n\t // You can have tooltips use a css class other than jqstooltip by specifying tooltipClassname\n\t defaultStyles = '.jqstooltip { ' +\n\t 'position: absolute;' +\n\t 'left: 0px;' +\n\t 'top: 0px;' +\n\t 'visibility: hidden;' +\n\t 'background: rgb(0, 0, 0) transparent;' +\n\t 'background-color: rgba(0,0,0,0.6);' +\n\t 'filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=#99000000, endColorstr=#99000000);' +\n\t '-ms-filter: \"progid:DXImageTransform.Microsoft.gradient(startColorstr=#99000000, endColorstr=#99000000)\";' +\n\t 'color: white;' +\n\t 'font: 10px arial, sans-serif;' +\n\t 'text-align: left;' +\n\t 'white-space: nowrap;' +\n\t 'padding: 5px;' +\n\t 'border: 1px solid white;' +\n\t 'z-index: 10000;' +\n\t '}' +\n\t '.jqsfield { ' +\n\t 'color: white;' +\n\t 'font: 10px arial, sans-serif;' +\n\t 'text-align: left;' +\n\t '}';\n\t\n\t /**\n\t * Utilities\n\t */\n\t\n\t createClass = function (/* [baseclass, [mixin, ...]], definition */) {\n\t var Class, args;\n\t Class = function () {\n\t this.init.apply(this, arguments);\n\t };\n\t if (arguments.length > 1) {\n\t if (arguments[0]) {\n\t Class.prototype = $.extend(new arguments[0](), arguments[arguments.length - 1]);\n\t Class._super = arguments[0].prototype;\n\t } else {\n\t Class.prototype = arguments[arguments.length - 1];\n\t }\n\t if (arguments.length > 2) {\n\t args = Array.prototype.slice.call(arguments, 1, -1);\n\t args.unshift(Class.prototype);\n\t $.extend.apply($, args);\n\t }\n\t } else {\n\t Class.prototype = arguments[0];\n\t }\n\t Class.prototype.cls = Class;\n\t return Class;\n\t };\n\t\n\t /**\n\t * Wraps a format string for tooltips\n\t * {{x}}\n\t * {{x.2}}\n\t * {{x:months}}\n\t */\n\t $.SPFormatClass = SPFormat = createClass({\n\t fre: /\\{\\{([\\w.]+?)(:(.+?))?\\}\\}/g,\n\t precre: /(\\w+)\\.(\\d+)/,\n\t\n\t init: function (format, fclass) {\n\t this.format = format;\n\t this.fclass = fclass;\n\t },\n\t\n\t render: function (fieldset, lookups, options) {\n\t var self = this,\n\t fields = fieldset,\n\t match, token, lookupkey, fieldvalue, prec;\n\t return this.format.replace(this.fre, function () {\n\t var lookup;\n\t token = arguments[1];\n\t lookupkey = arguments[3];\n\t match = self.precre.exec(token);\n\t if (match) {\n\t prec = match[2];\n\t token = match[1];\n\t } else {\n\t prec = false;\n\t }\n\t fieldvalue = fields[token];\n\t if (fieldvalue === undefined) {\n\t return '';\n\t }\n\t if (lookupkey && lookups && lookups[lookupkey]) {\n\t lookup = lookups[lookupkey];\n\t if (lookup.get) { // 
RangeMap\n\t return lookups[lookupkey].get(fieldvalue) || fieldvalue;\n\t } else {\n\t return lookups[lookupkey][fieldvalue] || fieldvalue;\n\t }\n\t }\n\t if (isNumber(fieldvalue)) {\n\t if (options.get('numberFormatter')) {\n\t fieldvalue = options.get('numberFormatter')(fieldvalue);\n\t } else {\n\t fieldvalue = formatNumber(fieldvalue, prec,\n\t options.get('numberDigitGroupCount'),\n\t options.get('numberDigitGroupSep'),\n\t options.get('numberDecimalMark'));\n\t }\n\t }\n\t return fieldvalue;\n\t });\n\t }\n\t });\n\t\n\t // convenience method to avoid needing the new operator\n\t $.spformat = function(format, fclass) {\n\t return new SPFormat(format, fclass);\n\t };\n\t\n\t clipval = function (val, min, max) {\n\t if (val < min) {\n\t return min;\n\t }\n\t if (val > max) {\n\t return max;\n\t }\n\t return val;\n\t };\n\t\n\t quartile = function (values, q) {\n\t var vl;\n\t if (q === 2) {\n\t vl = Math.floor(values.length / 2);\n\t return values.length % 2 ? values[vl] : (values[vl-1] + values[vl]) / 2;\n\t } else {\n\t if (values.length % 2 ) { // odd\n\t vl = (values.length * q + q) / 4;\n\t return vl % 1 ? (values[Math.floor(vl)] + values[Math.floor(vl) - 1]) / 2 : values[vl-1];\n\t } else { //even\n\t vl = (values.length * q + 2) / 4;\n\t return vl % 1 ? (values[Math.floor(vl)] + values[Math.floor(vl) - 1]) / 2 : values[vl-1];\n\t\n\t }\n\t }\n\t };\n\t\n\t normalizeValue = function (val) {\n\t var nf;\n\t switch (val) {\n\t case 'undefined':\n\t val = undefined;\n\t break;\n\t case 'null':\n\t val = null;\n\t break;\n\t case 'true':\n\t val = true;\n\t break;\n\t case 'false':\n\t val = false;\n\t break;\n\t default:\n\t nf = parseFloat(val);\n\t if (val == nf) {\n\t val = nf;\n\t }\n\t }\n\t return val;\n\t };\n\t\n\t normalizeValues = function (vals) {\n\t var i, result = [];\n\t for (i = vals.length; i--;) {\n\t result[i] = normalizeValue(vals[i]);\n\t }\n\t return result;\n\t };\n\t\n\t remove = function (vals, filter) {\n\t var i, vl, result = [];\n\t for (i = 0, vl = vals.length; i < vl; i++) {\n\t if (vals[i] !== filter) {\n\t result.push(vals[i]);\n\t }\n\t }\n\t return result;\n\t };\n\t\n\t isNumber = function (num) {\n\t return !isNaN(parseFloat(num)) && isFinite(num);\n\t };\n\t\n\t formatNumber = function (num, prec, groupsize, groupsep, decsep) {\n\t var p, i;\n\t num = (prec === false ? parseFloat(num).toString() : num.toFixed(prec)).split('');\n\t p = (p = $.inArray('.', num)) < 0 ? num.length : p;\n\t if (p < num.length) {\n\t num[p] = decsep;\n\t }\n\t for (i = p - groupsize; i > 0; i -= groupsize) {\n\t num.splice(i, 0, groupsep);\n\t }\n\t return num.join('');\n\t };\n\t\n\t // determine if all values of an array match a value\n\t // returns true if the array is empty\n\t all = function (val, arr, ignoreNull) {\n\t var i;\n\t for (i = arr.length; i--; ) {\n\t if (ignoreNull && arr[i] === null) continue;\n\t if (arr[i] !== val) {\n\t return false;\n\t }\n\t }\n\t return true;\n\t };\n\t\n\t // sums the numeric values in an array, ignoring other values\n\t sum = function (vals) {\n\t var total = 0, i;\n\t for (i = vals.length; i--;) {\n\t total += typeof vals[i] === 'number' ? vals[i] : 0;\n\t }\n\t return total;\n\t };\n\t\n\t ensureArray = function (val) {\n\t return $.isArray(val) ? 
val : [val];\n\t };\n\t\n\t // http://paulirish.com/2008/bookmarklet-inject-new-css-rules/\n\t addCSS = function(css) {\n\t var tag;\n\t //if ('\\v' == 'v') /* ie only */ {\n\t if (document.createStyleSheet) {\n\t document.createStyleSheet().cssText = css;\n\t } else {\n\t tag = document.createElement('style');\n\t tag.type = 'text/css';\n\t document.getElementsByTagName('head')[0].appendChild(tag);\n\t tag[(typeof document.body.style.WebkitAppearance == 'string') /* webkit only */ ? 'innerText' : 'innerHTML'] = css;\n\t }\n\t };\n\t\n\t // Provide a cross-browser interface to a few simple drawing primitives\n\t $.fn.simpledraw = function (width, height, useExisting, interact) {\n\t var target, mhandler;\n\t if (useExisting && (target = this.data('_jqs_vcanvas'))) {\n\t return target;\n\t }\n\t\n\t if ($.fn.sparkline.canvas === false) {\n\t // We've already determined that neither Canvas nor VML are available\n\t return false;\n\t\n\t } else if ($.fn.sparkline.canvas === undefined) {\n\t // No function defined yet -- need to see if we support Canvas or VML\n\t var el = document.createElement('canvas');\n\t if (!!(el.getContext && el.getContext('2d'))) {\n\t // Canvas is available\n\t $.fn.sparkline.canvas = function(width, height, target, interact) {\n\t return new VCanvas_canvas(width, height, target, interact);\n\t };\n\t } else if (document.namespaces && !document.namespaces.v) {\n\t // VML is available\n\t document.namespaces.add('v', 'urn:schemas-microsoft-com:vml', '#default#VML');\n\t $.fn.sparkline.canvas = function(width, height, target, interact) {\n\t return new VCanvas_vml(width, height, target);\n\t };\n\t } else {\n\t // Neither Canvas nor VML are available\n\t $.fn.sparkline.canvas = false;\n\t return false;\n\t }\n\t }\n\t\n\t if (width === undefined) {\n\t width = $(this).innerWidth();\n\t }\n\t if (height === undefined) {\n\t height = $(this).innerHeight();\n\t }\n\t\n\t target = $.fn.sparkline.canvas(width, height, this, interact);\n\t\n\t mhandler = $(this).data('_jqs_mhandler');\n\t if (mhandler) {\n\t mhandler.registerCanvas(target);\n\t }\n\t return target;\n\t };\n\t\n\t $.fn.cleardraw = function () {\n\t var target = this.data('_jqs_vcanvas');\n\t if (target) {\n\t target.reset();\n\t }\n\t };\n\t\n\t $.RangeMapClass = RangeMap = createClass({\n\t init: function (map) {\n\t var key, range, rangelist = [];\n\t for (key in map) {\n\t if (map.hasOwnProperty(key) && typeof key === 'string' && key.indexOf(':') > -1) {\n\t range = key.split(':');\n\t range[0] = range[0].length === 0 ? -Infinity : parseFloat(range[0]);\n\t range[1] = range[1].length === 0 ? 
Infinity : parseFloat(range[1]);\n\t range[2] = map[key];\n\t rangelist.push(range);\n\t }\n\t }\n\t this.map = map;\n\t this.rangelist = rangelist || false;\n\t },\n\t\n\t get: function (value) {\n\t var rangelist = this.rangelist,\n\t i, range, result;\n\t if ((result = this.map[value]) !== undefined) {\n\t return result;\n\t }\n\t if (rangelist) {\n\t for (i = rangelist.length; i--;) {\n\t range = rangelist[i];\n\t if (range[0] <= value && range[1] >= value) {\n\t return range[2];\n\t }\n\t }\n\t }\n\t return undefined;\n\t }\n\t });\n\t\n\t // Convenience function\n\t $.range_map = function(map) {\n\t return new RangeMap(map);\n\t };\n\t\n\t MouseHandler = createClass({\n\t init: function (el, options) {\n\t var $el = $(el);\n\t this.$el = $el;\n\t this.options = options;\n\t this.currentPageX = 0;\n\t this.currentPageY = 0;\n\t this.el = el;\n\t this.splist = [];\n\t this.tooltip = null;\n\t this.over = false;\n\t this.displayTooltips = !options.get('disableTooltips');\n\t this.highlightEnabled = !options.get('disableHighlight');\n\t },\n\t\n\t registerSparkline: function (sp) {\n\t this.splist.push(sp);\n\t if (this.over) {\n\t this.updateDisplay();\n\t }\n\t },\n\t\n\t registerCanvas: function (canvas) {\n\t var $canvas = $(canvas.canvas);\n\t this.canvas = canvas;\n\t this.$canvas = $canvas;\n\t $canvas.mouseenter($.proxy(this.mouseenter, this));\n\t $canvas.mouseleave($.proxy(this.mouseleave, this));\n\t $canvas.click($.proxy(this.mouseclick, this));\n\t },\n\t\n\t reset: function (removeTooltip) {\n\t this.splist = [];\n\t if (this.tooltip && removeTooltip) {\n\t this.tooltip.remove();\n\t this.tooltip = undefined;\n\t }\n\t },\n\t\n\t mouseclick: function (e) {\n\t var clickEvent = $.Event('sparklineClick');\n\t clickEvent.originalEvent = e;\n\t clickEvent.sparklines = this.splist;\n\t this.$el.trigger(clickEvent);\n\t },\n\t\n\t mouseenter: function (e) {\n\t $(document.body).unbind('mousemove.jqs');\n\t $(document.body).bind('mousemove.jqs', $.proxy(this.mousemove, this));\n\t this.over = true;\n\t this.currentPageX = e.pageX;\n\t this.currentPageY = e.pageY;\n\t this.currentEl = e.target;\n\t if (!this.tooltip && this.displayTooltips) {\n\t this.tooltip = new Tooltip(this.options);\n\t this.tooltip.updatePosition(e.pageX, e.pageY);\n\t }\n\t this.updateDisplay();\n\t },\n\t\n\t mouseleave: function () {\n\t $(document.body).unbind('mousemove.jqs');\n\t var splist = this.splist,\n\t spcount = splist.length,\n\t needsRefresh = false,\n\t sp, i;\n\t this.over = false;\n\t this.currentEl = null;\n\t\n\t if (this.tooltip) {\n\t this.tooltip.remove();\n\t this.tooltip = null;\n\t }\n\t\n\t for (i = 0; i < spcount; i++) {\n\t sp = splist[i];\n\t if (sp.clearRegionHighlight()) {\n\t needsRefresh = true;\n\t }\n\t }\n\t\n\t if (needsRefresh) {\n\t this.canvas.render();\n\t }\n\t },\n\t\n\t mousemove: function (e) {\n\t this.currentPageX = e.pageX;\n\t this.currentPageY = e.pageY;\n\t this.currentEl = e.target;\n\t if (this.tooltip) {\n\t this.tooltip.updatePosition(e.pageX, e.pageY);\n\t }\n\t this.updateDisplay();\n\t },\n\t\n\t updateDisplay: function () {\n\t var splist = this.splist,\n\t spcount = splist.length,\n\t needsRefresh = false,\n\t offset = this.$canvas.offset(),\n\t localX = this.currentPageX - offset.left,\n\t localY = this.currentPageY - offset.top,\n\t tooltiphtml, sp, i, result, changeEvent;\n\t if (!this.over) {\n\t return;\n\t }\n\t for (i = 0; i < spcount; i++) {\n\t sp = splist[i];\n\t result = sp.setRegionHighlight(this.currentEl, localX, localY);\n\t if 
(result) {\n\t needsRefresh = true;\n\t }\n\t }\n\t if (needsRefresh) {\n\t changeEvent = $.Event('sparklineRegionChange');\n\t changeEvent.sparklines = this.splist;\n\t this.$el.trigger(changeEvent);\n\t if (this.tooltip) {\n\t tooltiphtml = '';\n\t for (i = 0; i < spcount; i++) {\n\t sp = splist[i];\n\t tooltiphtml += sp.getCurrentRegionTooltip();\n\t }\n\t this.tooltip.setContent(tooltiphtml);\n\t }\n\t if (!this.disableHighlight) {\n\t this.canvas.render();\n\t }\n\t }\n\t if (result === null) {\n\t this.mouseleave();\n\t }\n\t }\n\t });\n\t\n\t\n\t Tooltip = createClass({\n\t sizeStyle: 'position: static !important;' +\n\t 'display: block !important;' +\n\t 'visibility: hidden !important;' +\n\t 'float: left !important;',\n\t\n\t init: function (options) {\n\t var tooltipClassname = options.get('tooltipClassname', 'jqstooltip'),\n\t sizetipStyle = this.sizeStyle,\n\t offset;\n\t this.container = options.get('tooltipContainer') || document.body;\n\t this.tooltipOffsetX = options.get('tooltipOffsetX', 10);\n\t this.tooltipOffsetY = options.get('tooltipOffsetY', 12);\n\t // remove any previous lingering tooltip\n\t $('#jqssizetip').remove();\n\t $('#jqstooltip').remove();\n\t this.sizetip = $('
<div/>', {\n\t id: 'jqssizetip',\n\t style: sizetipStyle,\n\t 'class': tooltipClassname\n\t });\n\t this.tooltip = $('<div/>
          ', {\n\t id: 'jqstooltip',\n\t 'class': tooltipClassname\n\t }).appendTo(this.container);\n\t // account for the container's location\n\t offset = this.tooltip.offset();\n\t this.offsetLeft = offset.left;\n\t this.offsetTop = offset.top;\n\t this.hidden = true;\n\t $(window).unbind('resize.jqs scroll.jqs');\n\t $(window).bind('resize.jqs scroll.jqs', $.proxy(this.updateWindowDims, this));\n\t this.updateWindowDims();\n\t },\n\t\n\t updateWindowDims: function () {\n\t this.scrollTop = $(window).scrollTop();\n\t this.scrollLeft = $(window).scrollLeft();\n\t this.scrollRight = this.scrollLeft + $(window).width();\n\t this.updatePosition();\n\t },\n\t\n\t getSize: function (content) {\n\t this.sizetip.html(content).appendTo(this.container);\n\t this.width = this.sizetip.width() + 1;\n\t this.height = this.sizetip.height();\n\t this.sizetip.remove();\n\t },\n\t\n\t setContent: function (content) {\n\t if (!content) {\n\t this.tooltip.css('visibility', 'hidden');\n\t this.hidden = true;\n\t return;\n\t }\n\t this.getSize(content);\n\t this.tooltip.html(content)\n\t .css({\n\t 'width': this.width,\n\t 'height': this.height,\n\t 'visibility': 'visible'\n\t });\n\t if (this.hidden) {\n\t this.hidden = false;\n\t this.updatePosition();\n\t }\n\t },\n\t\n\t updatePosition: function (x, y) {\n\t if (x === undefined) {\n\t if (this.mousex === undefined) {\n\t return;\n\t }\n\t x = this.mousex - this.offsetLeft;\n\t y = this.mousey - this.offsetTop;\n\t\n\t } else {\n\t this.mousex = x = x - this.offsetLeft;\n\t this.mousey = y = y - this.offsetTop;\n\t }\n\t if (!this.height || !this.width || this.hidden) {\n\t return;\n\t }\n\t\n\t y -= this.height + this.tooltipOffsetY;\n\t x += this.tooltipOffsetX;\n\t\n\t if (y < this.scrollTop) {\n\t y = this.scrollTop;\n\t }\n\t if (x < this.scrollLeft) {\n\t x = this.scrollLeft;\n\t } else if (x + this.width > this.scrollRight) {\n\t x = this.scrollRight - this.width;\n\t }\n\t\n\t this.tooltip.css({\n\t 'left': x,\n\t 'top': y\n\t });\n\t },\n\t\n\t remove: function () {\n\t this.tooltip.remove();\n\t this.sizetip.remove();\n\t this.sizetip = this.tooltip = undefined;\n\t $(window).unbind('resize.jqs scroll.jqs');\n\t }\n\t });\n\t\n\t initStyles = function() {\n\t addCSS(defaultStyles);\n\t };\n\t\n\t $(initStyles);\n\t\n\t pending = [];\n\t $.fn.sparkline = function (userValues, userOptions) {\n\t return this.each(function () {\n\t var options = new $.fn.sparkline.options(this, userOptions),\n\t $this = $(this),\n\t render, i;\n\t render = function () {\n\t var values, width, height, tmp, mhandler, sp, vals;\n\t if (userValues === 'html' || userValues === undefined) {\n\t vals = this.getAttribute(options.get('tagValuesAttribute'));\n\t if (vals === undefined || vals === null) {\n\t vals = $this.html();\n\t }\n\t values = vals.replace(/(^\\s*\\s*$)|\\s+/g, '').split(',');\n\t } else {\n\t values = userValues;\n\t }\n\t\n\t width = options.get('width') === 'auto' ? 
values.length * options.get('defaultPixelsPerValue') : options.get('width');\n\t if (options.get('height') === 'auto') {\n\t if (!options.get('composite') || !$.data(this, '_jqs_vcanvas')) {\n\t // must be a better way to get the line height\n\t tmp = document.createElement('span');\n\t tmp.innerHTML = 'a';\n\t $this.html(tmp);\n\t height = $(tmp).innerHeight() || $(tmp).height();\n\t $(tmp).remove();\n\t tmp = null;\n\t }\n\t } else {\n\t height = options.get('height');\n\t }\n\t\n\t if (!options.get('disableInteraction')) {\n\t mhandler = $.data(this, '_jqs_mhandler');\n\t if (!mhandler) {\n\t mhandler = new MouseHandler(this, options);\n\t $.data(this, '_jqs_mhandler', mhandler);\n\t } else if (!options.get('composite')) {\n\t mhandler.reset();\n\t }\n\t } else {\n\t mhandler = false;\n\t }\n\t\n\t if (options.get('composite') && !$.data(this, '_jqs_vcanvas')) {\n\t if (!$.data(this, '_jqs_errnotify')) {\n\t alert('Attempted to attach a composite sparkline to an element with no existing sparkline');\n\t $.data(this, '_jqs_errnotify', true);\n\t }\n\t return;\n\t }\n\t\n\t sp = new $.fn.sparkline[options.get('type')](this, values, options, width, height);\n\t\n\t sp.render();\n\t\n\t if (mhandler) {\n\t mhandler.registerSparkline(sp);\n\t }\n\t };\n\t if (($(this).html() && !options.get('disableHiddenCheck') && $(this).is(':hidden')) || !$(this).parents('body').length) {\n\t if (!options.get('composite') && $.data(this, '_jqs_pending')) {\n\t // remove any existing references to the element\n\t for (i = pending.length; i; i--) {\n\t if (pending[i - 1][0] == this) {\n\t pending.splice(i - 1, 1);\n\t }\n\t }\n\t }\n\t pending.push([this, render]);\n\t $.data(this, '_jqs_pending', true);\n\t } else {\n\t render.call(this);\n\t }\n\t });\n\t };\n\t\n\t $.fn.sparkline.defaults = getDefaults();\n\t\n\t\n\t $.sparkline_display_visible = function () {\n\t var el, i, pl;\n\t var done = [];\n\t for (i = 0, pl = pending.length; i < pl; i++) {\n\t el = pending[i][0];\n\t if ($(el).is(':visible') && !$(el).parents().is(':hidden')) {\n\t pending[i][1].call(el);\n\t $.data(pending[i][0], '_jqs_pending', false);\n\t done.push(i);\n\t } else if (!$(el).closest('html').length && !$.data(el, '_jqs_pending')) {\n\t // element has been inserted and removed from the DOM\n\t // If it was not yet inserted into the dom then the .data request\n\t // will return true.\n\t // removing from the dom causes the data to be removed.\n\t $.data(pending[i][0], '_jqs_pending', false);\n\t done.push(i);\n\t }\n\t }\n\t for (i = done.length; i; i--) {\n\t pending.splice(done[i - 1], 1);\n\t }\n\t };\n\t\n\t\n\t /**\n\t * User option handler\n\t */\n\t $.fn.sparkline.options = createClass({\n\t init: function (tag, userOptions) {\n\t var extendedOptions, defaults, base, tagOptionType;\n\t this.userOptions = userOptions = userOptions || {};\n\t this.tag = tag;\n\t this.tagValCache = {};\n\t defaults = $.fn.sparkline.defaults;\n\t base = defaults.common;\n\t this.tagOptionsPrefix = userOptions.enableTagOptions && (userOptions.tagOptionsPrefix || base.tagOptionsPrefix);\n\t\n\t tagOptionType = this.getTagSetting('type');\n\t if (tagOptionType === UNSET_OPTION) {\n\t extendedOptions = defaults[userOptions.type || base.type];\n\t } else {\n\t extendedOptions = defaults[tagOptionType];\n\t }\n\t this.mergedOptions = $.extend({}, base, extendedOptions, userOptions);\n\t },\n\t\n\t\n\t getTagSetting: function (key) {\n\t var prefix = this.tagOptionsPrefix,\n\t val, i, pairs, keyval;\n\t if (prefix === false || prefix === undefined) 
{\n\t return UNSET_OPTION;\n\t }\n\t if (this.tagValCache.hasOwnProperty(key)) {\n\t val = this.tagValCache[key];\n\t } else {\n\t val = this.tag.getAttribute(prefix + key);\n\t if (val === undefined || val === null) {\n\t val = UNSET_OPTION;\n\t } else if (val.substr(0, 1) === '[') {\n\t val = val.substr(1, val.length - 2).split(',');\n\t for (i = val.length; i--;) {\n\t val[i] = normalizeValue(val[i].replace(/(^\\s*)|(\\s*$)/g, ''));\n\t }\n\t } else if (val.substr(0, 1) === '{') {\n\t pairs = val.substr(1, val.length - 2).split(',');\n\t val = {};\n\t for (i = pairs.length; i--;) {\n\t keyval = pairs[i].split(':', 2);\n\t val[keyval[0].replace(/(^\\s*)|(\\s*$)/g, '')] = normalizeValue(keyval[1].replace(/(^\\s*)|(\\s*$)/g, ''));\n\t }\n\t } else {\n\t val = normalizeValue(val);\n\t }\n\t this.tagValCache[key] = val;\n\t }\n\t return val;\n\t },\n\t\n\t get: function (key, defaultval) {\n\t var tagOption = this.getTagSetting(key),\n\t result;\n\t if (tagOption !== UNSET_OPTION) {\n\t return tagOption;\n\t }\n\t return (result = this.mergedOptions[key]) === undefined ? defaultval : result;\n\t }\n\t });\n\t\n\t\n\t $.fn.sparkline._base = createClass({\n\t disabled: false,\n\t\n\t init: function (el, values, options, width, height) {\n\t this.el = el;\n\t this.$el = $(el);\n\t this.values = values;\n\t this.options = options;\n\t this.width = width;\n\t this.height = height;\n\t this.currentRegion = undefined;\n\t },\n\t\n\t /**\n\t * Setup the canvas\n\t */\n\t initTarget: function () {\n\t var interactive = !this.options.get('disableInteraction');\n\t if (!(this.target = this.$el.simpledraw(this.width, this.height, this.options.get('composite'), interactive))) {\n\t this.disabled = true;\n\t } else {\n\t this.canvasWidth = this.target.pixelWidth;\n\t this.canvasHeight = this.target.pixelHeight;\n\t }\n\t },\n\t\n\t /**\n\t * Actually render the chart to the canvas\n\t */\n\t render: function () {\n\t if (this.disabled) {\n\t this.el.innerHTML = '';\n\t return false;\n\t }\n\t return true;\n\t },\n\t\n\t /**\n\t * Return a region id for a given x/y co-ordinate\n\t */\n\t getRegion: function (x, y) {\n\t },\n\t\n\t /**\n\t * Highlight an item based on the moused-over x,y co-ordinate\n\t */\n\t setRegionHighlight: function (el, x, y) {\n\t var currentRegion = this.currentRegion,\n\t highlightEnabled = !this.options.get('disableHighlight'),\n\t newRegion;\n\t if (x > this.canvasWidth || y > this.canvasHeight || x < 0 || y < 0) {\n\t return null;\n\t }\n\t newRegion = this.getRegion(el, x, y);\n\t if (currentRegion !== newRegion) {\n\t if (currentRegion !== undefined && highlightEnabled) {\n\t this.removeHighlight();\n\t }\n\t this.currentRegion = newRegion;\n\t if (newRegion !== undefined && highlightEnabled) {\n\t this.renderHighlight();\n\t }\n\t return true;\n\t }\n\t return false;\n\t },\n\t\n\t /**\n\t * Reset any currently highlighted item\n\t */\n\t clearRegionHighlight: function () {\n\t if (this.currentRegion !== undefined) {\n\t this.removeHighlight();\n\t this.currentRegion = undefined;\n\t return true;\n\t }\n\t return false;\n\t },\n\t\n\t renderHighlight: function () {\n\t this.changeHighlight(true);\n\t },\n\t\n\t removeHighlight: function () {\n\t this.changeHighlight(false);\n\t },\n\t\n\t changeHighlight: function (highlight) {},\n\t\n\t /**\n\t * Fetch the HTML to display as a tooltip\n\t */\n\t getCurrentRegionTooltip: function () {\n\t var options = this.options,\n\t header = '',\n\t entries = [],\n\t fields, formats, formatlen, fclass, text, i,\n\t showFields, 
showFieldsKey, newFields, fv,\n\t formatter, format, fieldlen, j;\n\t if (this.currentRegion === undefined) {\n\t return '';\n\t }\n\t fields = this.getCurrentRegionFields();\n\t formatter = options.get('tooltipFormatter');\n\t if (formatter) {\n\t return formatter(this, options, fields);\n\t }\n\t if (options.get('tooltipChartTitle')) {\n\t header += '
<div class=\"jqs jqstitle\">' + options.get('tooltipChartTitle') + '</div>
          \\n';\n\t }\n\t formats = this.options.get('tooltipFormat');\n\t if (!formats) {\n\t return '';\n\t }\n\t if (!$.isArray(formats)) {\n\t formats = [formats];\n\t }\n\t if (!$.isArray(fields)) {\n\t fields = [fields];\n\t }\n\t showFields = this.options.get('tooltipFormatFieldlist');\n\t showFieldsKey = this.options.get('tooltipFormatFieldlistKey');\n\t if (showFields && showFieldsKey) {\n\t // user-selected ordering of fields\n\t newFields = [];\n\t for (i = fields.length; i--;) {\n\t fv = fields[i][showFieldsKey];\n\t if ((j = $.inArray(fv, showFields)) != -1) {\n\t newFields[j] = fields[i];\n\t }\n\t }\n\t fields = newFields;\n\t }\n\t formatlen = formats.length;\n\t fieldlen = fields.length;\n\t for (i = 0; i < formatlen; i++) {\n\t format = formats[i];\n\t if (typeof format === 'string') {\n\t format = new SPFormat(format);\n\t }\n\t fclass = format.fclass || 'jqsfield';\n\t for (j = 0; j < fieldlen; j++) {\n\t if (!fields[j].isNull || !options.get('tooltipSkipNull')) {\n\t $.extend(fields[j], {\n\t prefix: options.get('tooltipPrefix'),\n\t suffix: options.get('tooltipSuffix')\n\t });\n\t text = format.render(fields[j], options.get('tooltipValueLookups'), options);\n\t entries.push('
<div class=\"' + fclass + '\">' + text + '</div>
          ');\n\t }\n\t }\n\t }\n\t if (entries.length) {\n\t return header + entries.join('\\n');\n\t }\n\t return '';\n\t },\n\t\n\t getCurrentRegionFields: function () {},\n\t\n\t calcHighlightColor: function (color, options) {\n\t var highlightColor = options.get('highlightColor'),\n\t lighten = options.get('highlightLighten'),\n\t parse, mult, rgbnew, i;\n\t if (highlightColor) {\n\t return highlightColor;\n\t }\n\t if (lighten) {\n\t // extract RGB values\n\t parse = /^#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(color) || /^#([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})$/i.exec(color);\n\t if (parse) {\n\t rgbnew = [];\n\t mult = color.length === 4 ? 16 : 1;\n\t for (i = 0; i < 3; i++) {\n\t rgbnew[i] = clipval(Math.round(parseInt(parse[i + 1], 16) * mult * lighten), 0, 255);\n\t }\n\t return 'rgb(' + rgbnew.join(',') + ')';\n\t }\n\t\n\t }\n\t return color;\n\t }\n\t\n\t });\n\t\n\t barHighlightMixin = {\n\t changeHighlight: function (highlight) {\n\t var currentRegion = this.currentRegion,\n\t target = this.target,\n\t shapeids = this.regionShapes[currentRegion],\n\t newShapes;\n\t // will be null if the region value was null\n\t if (shapeids) {\n\t newShapes = this.renderRegion(currentRegion, highlight);\n\t if ($.isArray(newShapes) || $.isArray(shapeids)) {\n\t target.replaceWithShapes(shapeids, newShapes);\n\t this.regionShapes[currentRegion] = $.map(newShapes, function (newShape) {\n\t return newShape.id;\n\t });\n\t } else {\n\t target.replaceWithShape(shapeids, newShapes);\n\t this.regionShapes[currentRegion] = newShapes.id;\n\t }\n\t }\n\t },\n\t\n\t render: function () {\n\t var values = this.values,\n\t target = this.target,\n\t regionShapes = this.regionShapes,\n\t shapes, ids, i, j;\n\t\n\t if (!this.cls._super.render.call(this)) {\n\t return;\n\t }\n\t for (i = values.length; i--;) {\n\t shapes = this.renderRegion(i);\n\t if (shapes) {\n\t if ($.isArray(shapes)) {\n\t ids = [];\n\t for (j = shapes.length; j--;) {\n\t shapes[j].append();\n\t ids.push(shapes[j].id);\n\t }\n\t regionShapes[i] = ids;\n\t } else {\n\t shapes.append();\n\t regionShapes[i] = shapes.id; // store just the shapeid\n\t }\n\t } else {\n\t // null value\n\t regionShapes[i] = null;\n\t }\n\t }\n\t target.render();\n\t }\n\t };\n\t\n\t /**\n\t * Line charts\n\t */\n\t $.fn.sparkline.line = line = createClass($.fn.sparkline._base, {\n\t type: 'line',\n\t\n\t init: function (el, values, options, width, height) {\n\t line._super.init.call(this, el, values, options, width, height);\n\t this.vertices = [];\n\t this.regionMap = [];\n\t this.xvalues = [];\n\t this.yvalues = [];\n\t this.yminmax = [];\n\t this.hightlightSpotId = null;\n\t this.lastShapeId = null;\n\t this.initTarget();\n\t },\n\t\n\t getRegion: function (el, x, y) {\n\t var i,\n\t regionMap = this.regionMap; // maps regions to value positions\n\t for (i = regionMap.length; i--;) {\n\t if (regionMap[i] !== null && x >= regionMap[i][0] && x <= regionMap[i][1]) {\n\t return regionMap[i][2];\n\t }\n\t }\n\t return undefined;\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var currentRegion = this.currentRegion;\n\t return {\n\t isNull: this.yvalues[currentRegion] === null,\n\t x: this.xvalues[currentRegion],\n\t y: this.yvalues[currentRegion],\n\t color: this.options.get('lineColor'),\n\t fillColor: this.options.get('fillColor'),\n\t offset: currentRegion\n\t };\n\t },\n\t\n\t renderHighlight: function () {\n\t var currentRegion = this.currentRegion,\n\t target = this.target,\n\t vertex = this.vertices[currentRegion],\n\t options = 
this.options,\n\t spotRadius = options.get('spotRadius'),\n\t highlightSpotColor = options.get('highlightSpotColor'),\n\t highlightLineColor = options.get('highlightLineColor'),\n\t highlightSpot, highlightLine;\n\t\n\t if (!vertex) {\n\t return;\n\t }\n\t if (spotRadius && highlightSpotColor) {\n\t highlightSpot = target.drawCircle(vertex[0], vertex[1],\n\t spotRadius, undefined, highlightSpotColor);\n\t this.highlightSpotId = highlightSpot.id;\n\t target.insertAfterShape(this.lastShapeId, highlightSpot);\n\t }\n\t if (highlightLineColor) {\n\t highlightLine = target.drawLine(vertex[0], this.canvasTop, vertex[0],\n\t this.canvasTop + this.canvasHeight, highlightLineColor);\n\t this.highlightLineId = highlightLine.id;\n\t target.insertAfterShape(this.lastShapeId, highlightLine);\n\t }\n\t },\n\t\n\t removeHighlight: function () {\n\t var target = this.target;\n\t if (this.highlightSpotId) {\n\t target.removeShapeId(this.highlightSpotId);\n\t this.highlightSpotId = null;\n\t }\n\t if (this.highlightLineId) {\n\t target.removeShapeId(this.highlightLineId);\n\t this.highlightLineId = null;\n\t }\n\t },\n\t\n\t scanValues: function () {\n\t var values = this.values,\n\t valcount = values.length,\n\t xvalues = this.xvalues,\n\t yvalues = this.yvalues,\n\t yminmax = this.yminmax,\n\t i, val, isStr, isArray, sp;\n\t for (i = 0; i < valcount; i++) {\n\t val = values[i];\n\t isStr = typeof(values[i]) === 'string';\n\t isArray = typeof(values[i]) === 'object' && values[i] instanceof Array;\n\t sp = isStr && values[i].split(':');\n\t if (isStr && sp.length === 2) { // x:y\n\t xvalues.push(Number(sp[0]));\n\t yvalues.push(Number(sp[1]));\n\t yminmax.push(Number(sp[1]));\n\t } else if (isArray) {\n\t xvalues.push(val[0]);\n\t yvalues.push(val[1]);\n\t yminmax.push(val[1]);\n\t } else {\n\t xvalues.push(i);\n\t if (values[i] === null || values[i] === 'null') {\n\t yvalues.push(null);\n\t } else {\n\t yvalues.push(Number(val));\n\t yminmax.push(Number(val));\n\t }\n\t }\n\t }\n\t if (this.options.get('xvalues')) {\n\t xvalues = this.options.get('xvalues');\n\t }\n\t\n\t this.maxy = this.maxyorg = Math.max.apply(Math, yminmax);\n\t this.miny = this.minyorg = Math.min.apply(Math, yminmax);\n\t\n\t this.maxx = Math.max.apply(Math, xvalues);\n\t this.minx = Math.min.apply(Math, xvalues);\n\t\n\t this.xvalues = xvalues;\n\t this.yvalues = yvalues;\n\t this.yminmax = yminmax;\n\t\n\t },\n\t\n\t processRangeOptions: function () {\n\t var options = this.options,\n\t normalRangeMin = options.get('normalRangeMin'),\n\t normalRangeMax = options.get('normalRangeMax');\n\t\n\t if (normalRangeMin !== undefined) {\n\t if (normalRangeMin < this.miny) {\n\t this.miny = normalRangeMin;\n\t }\n\t if (normalRangeMax > this.maxy) {\n\t this.maxy = normalRangeMax;\n\t }\n\t }\n\t if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < this.miny)) {\n\t this.miny = options.get('chartRangeMin');\n\t }\n\t if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > this.maxy)) {\n\t this.maxy = options.get('chartRangeMax');\n\t }\n\t if (options.get('chartRangeMinX') !== undefined && (options.get('chartRangeClipX') || options.get('chartRangeMinX') < this.minx)) {\n\t this.minx = options.get('chartRangeMinX');\n\t }\n\t if (options.get('chartRangeMaxX') !== undefined && (options.get('chartRangeClipX') || options.get('chartRangeMaxX') > this.maxx)) {\n\t this.maxx = options.get('chartRangeMaxX');\n\t }\n\t\n\t 
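// Editorial usage sketch, not part of the bundled library: processRangeOptions() above\n\t // widens the computed min/max to any chartRangeMin/chartRangeMax (or the X variants)\n\t // the caller supplied, and with chartRangeClip set, out-of-range values are clipped\n\t // to those bounds instead of rescaling the whole chart. Assuming an element with the\n\t // hypothetical id 'clipped' and jQuery plus this plugin loaded, a minimal call is:\n\t //   $('#clipped').sparkline([2, 4, 60, 3], { type: 'line', chartRangeMax: 10, chartRangeClip: true });\n\t // Here the 60 is drawn at the 10 cap rather than stretching the Y scale.\n\t 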
},\n\t\n\t drawNormalRange: function (canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey) {\n\t var normalRangeMin = this.options.get('normalRangeMin'),\n\t normalRangeMax = this.options.get('normalRangeMax'),\n\t ytop = canvasTop + Math.round(canvasHeight - (canvasHeight * ((normalRangeMax - this.miny) / rangey))),\n\t height = Math.round((canvasHeight * (normalRangeMax - normalRangeMin)) / rangey);\n\t this.target.drawRect(canvasLeft, ytop, canvasWidth, height, undefined, this.options.get('normalRangeColor')).append();\n\t },\n\t\n\t render: function () {\n\t var options = this.options,\n\t target = this.target,\n\t canvasWidth = this.canvasWidth,\n\t canvasHeight = this.canvasHeight,\n\t vertices = this.vertices,\n\t spotRadius = options.get('spotRadius'),\n\t regionMap = this.regionMap,\n\t rangex, rangey, yvallast,\n\t canvasTop, canvasLeft,\n\t vertex, path, paths, x, y, xnext, xpos, xposnext,\n\t last, next, yvalcount, lineShapes, fillShapes, plen,\n\t valueSpots, hlSpotsEnabled, color, xvalues, yvalues, i;\n\t\n\t if (!line._super.render.call(this)) {\n\t return;\n\t }\n\t\n\t this.scanValues();\n\t this.processRangeOptions();\n\t\n\t xvalues = this.xvalues;\n\t yvalues = this.yvalues;\n\t\n\t if (!this.yminmax.length || this.yvalues.length < 2) {\n\t // empty or all null values\n\t return;\n\t }\n\t\n\t canvasTop = canvasLeft = 0;\n\t\n\t rangex = this.maxx - this.minx === 0 ? 1 : this.maxx - this.minx;\n\t rangey = this.maxy - this.miny === 0 ? 1 : this.maxy - this.miny;\n\t yvallast = this.yvalues.length - 1;\n\t\n\t if (spotRadius && (canvasWidth < (spotRadius * 4) || canvasHeight < (spotRadius * 4))) {\n\t spotRadius = 0;\n\t }\n\t if (spotRadius) {\n\t // adjust the canvas size as required so that spots will fit\n\t hlSpotsEnabled = options.get('highlightSpotColor') && !options.get('disableInteraction');\n\t if (hlSpotsEnabled || options.get('minSpotColor') || (options.get('spotColor') && yvalues[yvallast] === this.miny)) {\n\t canvasHeight -= Math.ceil(spotRadius);\n\t }\n\t if (hlSpotsEnabled || options.get('maxSpotColor') || (options.get('spotColor') && yvalues[yvallast] === this.maxy)) {\n\t canvasHeight -= Math.ceil(spotRadius);\n\t canvasTop += Math.ceil(spotRadius);\n\t }\n\t if (hlSpotsEnabled ||\n\t ((options.get('minSpotColor') || options.get('maxSpotColor')) && (yvalues[0] === this.miny || yvalues[0] === this.maxy))) {\n\t canvasLeft += Math.ceil(spotRadius);\n\t canvasWidth -= Math.ceil(spotRadius);\n\t }\n\t if (hlSpotsEnabled || options.get('spotColor') ||\n\t (options.get('minSpotColor') || options.get('maxSpotColor') &&\n\t (yvalues[yvallast] === this.miny || yvalues[yvallast] === this.maxy))) {\n\t canvasWidth -= Math.ceil(spotRadius);\n\t }\n\t }\n\t\n\t\n\t canvasHeight--;\n\t\n\t if (options.get('normalRangeMin') !== undefined && !options.get('drawNormalOnTop')) {\n\t this.drawNormalRange(canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey);\n\t }\n\t\n\t path = [];\n\t paths = [path];\n\t last = next = null;\n\t yvalcount = yvalues.length;\n\t for (i = 0; i < yvalcount; i++) {\n\t x = xvalues[i];\n\t xnext = xvalues[i + 1];\n\t y = yvalues[i];\n\t xpos = canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex));\n\t xposnext = i < yvalcount - 1 ? 
canvasLeft + Math.round((xnext - this.minx) * (canvasWidth / rangex)) : canvasWidth;\n\t next = xpos + ((xposnext - xpos) / 2);\n\t regionMap[i] = [last || 0, next, i];\n\t last = next;\n\t if (y === null) {\n\t if (i) {\n\t if (yvalues[i - 1] !== null) {\n\t path = [];\n\t paths.push(path);\n\t }\n\t vertices.push(null);\n\t }\n\t } else {\n\t if (y < this.miny) {\n\t y = this.miny;\n\t }\n\t if (y > this.maxy) {\n\t y = this.maxy;\n\t }\n\t if (!path.length) {\n\t // previous value was null\n\t path.push([xpos, canvasTop + canvasHeight]);\n\t }\n\t vertex = [xpos, canvasTop + Math.round(canvasHeight - (canvasHeight * ((y - this.miny) / rangey)))];\n\t path.push(vertex);\n\t vertices.push(vertex);\n\t }\n\t }\n\t\n\t lineShapes = [];\n\t fillShapes = [];\n\t plen = paths.length;\n\t for (i = 0; i < plen; i++) {\n\t path = paths[i];\n\t if (path.length) {\n\t if (options.get('fillColor')) {\n\t path.push([path[path.length - 1][0], (canvasTop + canvasHeight)]);\n\t fillShapes.push(path.slice(0));\n\t path.pop();\n\t }\n\t // if there's only a single point in this path, then we want to display it\n\t // as a vertical line which means we keep path[0] as is\n\t if (path.length > 2) {\n\t // else we want the first value\n\t path[0] = [path[0][0], path[1][1]];\n\t }\n\t lineShapes.push(path);\n\t }\n\t }\n\t\n\t // draw the fill first, then optionally the normal range, then the line on top of that\n\t plen = fillShapes.length;\n\t for (i = 0; i < plen; i++) {\n\t target.drawShape(fillShapes[i],\n\t options.get('fillColor'), options.get('fillColor')).append();\n\t }\n\t\n\t if (options.get('normalRangeMin') !== undefined && options.get('drawNormalOnTop')) {\n\t this.drawNormalRange(canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey);\n\t }\n\t\n\t plen = lineShapes.length;\n\t for (i = 0; i < plen; i++) {\n\t target.drawShape(lineShapes[i], options.get('lineColor'), undefined,\n\t options.get('lineWidth')).append();\n\t }\n\t\n\t if (spotRadius && options.get('valueSpots')) {\n\t valueSpots = options.get('valueSpots');\n\t if (valueSpots.get === undefined) {\n\t valueSpots = new RangeMap(valueSpots);\n\t }\n\t for (i = 0; i < yvalcount; i++) {\n\t color = valueSpots.get(yvalues[i]);\n\t if (color) {\n\t target.drawCircle(canvasLeft + Math.round((xvalues[i] - this.minx) * (canvasWidth / rangex)),\n\t canvasTop + Math.round(canvasHeight - (canvasHeight * ((yvalues[i] - this.miny) / rangey))),\n\t spotRadius, undefined,\n\t color).append();\n\t }\n\t }\n\t\n\t }\n\t if (spotRadius && options.get('spotColor') && yvalues[yvallast] !== null) {\n\t target.drawCircle(canvasLeft + Math.round((xvalues[xvalues.length - 1] - this.minx) * (canvasWidth / rangex)),\n\t canvasTop + Math.round(canvasHeight - (canvasHeight * ((yvalues[yvallast] - this.miny) / rangey))),\n\t spotRadius, undefined,\n\t options.get('spotColor')).append();\n\t }\n\t if (this.maxy !== this.minyorg) {\n\t if (spotRadius && options.get('minSpotColor')) {\n\t x = xvalues[$.inArray(this.minyorg, yvalues)];\n\t target.drawCircle(canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)),\n\t canvasTop + Math.round(canvasHeight - (canvasHeight * ((this.minyorg - this.miny) / rangey))),\n\t spotRadius, undefined,\n\t options.get('minSpotColor')).append();\n\t }\n\t if (spotRadius && options.get('maxSpotColor')) {\n\t x = xvalues[$.inArray(this.maxyorg, yvalues)];\n\t target.drawCircle(canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)),\n\t canvasTop + Math.round(canvasHeight - (canvasHeight * ((this.maxyorg - 
this.miny) / rangey))),\n\t spotRadius, undefined,\n\t options.get('maxSpotColor')).append();\n\t }\n\t }\n\t\n\t this.lastShapeId = target.getLastShapeId();\n\t this.canvasTop = canvasTop;\n\t target.render();\n\t }\n\t });\n\t\n\t /**\n\t * Bar charts\n\t */\n\t $.fn.sparkline.bar = bar = createClass($.fn.sparkline._base, barHighlightMixin, {\n\t type: 'bar',\n\t\n\t init: function (el, values, options, width, height) {\n\t var barWidth = parseInt(options.get('barWidth'), 10),\n\t barSpacing = parseInt(options.get('barSpacing'), 10),\n\t chartRangeMin = options.get('chartRangeMin'),\n\t chartRangeMax = options.get('chartRangeMax'),\n\t chartRangeClip = options.get('chartRangeClip'),\n\t stackMin = Infinity,\n\t stackMax = -Infinity,\n\t isStackString, groupMin, groupMax, stackRanges,\n\t numValues, i, vlen, range, zeroAxis, xaxisOffset, min, max, clipMin, clipMax,\n\t stacked, vlist, j, slen, svals, val, yoffset, yMaxCalc, canvasHeightEf;\n\t bar._super.init.call(this, el, values, options, width, height);\n\t\n\t // scan values to determine whether to stack bars\n\t for (i = 0, vlen = values.length; i < vlen; i++) {\n\t val = values[i];\n\t isStackString = typeof(val) === 'string' && val.indexOf(':') > -1;\n\t if (isStackString || $.isArray(val)) {\n\t stacked = true;\n\t if (isStackString) {\n\t val = values[i] = normalizeValues(val.split(':'));\n\t }\n\t val = remove(val, null); // min/max will treat null as zero\n\t groupMin = Math.min.apply(Math, val);\n\t groupMax = Math.max.apply(Math, val);\n\t if (groupMin < stackMin) {\n\t stackMin = groupMin;\n\t }\n\t if (groupMax > stackMax) {\n\t stackMax = groupMax;\n\t }\n\t }\n\t }\n\t\n\t this.stacked = stacked;\n\t this.regionShapes = {};\n\t this.barWidth = barWidth;\n\t this.barSpacing = barSpacing;\n\t this.totalBarWidth = barWidth + barSpacing;\n\t this.width = width = (values.length * barWidth) + ((values.length - 1) * barSpacing);\n\t\n\t this.initTarget();\n\t\n\t if (chartRangeClip) {\n\t clipMin = chartRangeMin === undefined ? -Infinity : chartRangeMin;\n\t clipMax = chartRangeMax === undefined ? Infinity : chartRangeMax;\n\t }\n\t\n\t numValues = [];\n\t stackRanges = stacked ? [] : numValues;\n\t var stackTotals = [];\n\t var stackRangesNeg = [];\n\t for (i = 0, vlen = values.length; i < vlen; i++) {\n\t if (stacked) {\n\t vlist = values[i];\n\t values[i] = svals = [];\n\t stackTotals[i] = 0;\n\t stackRanges[i] = stackRangesNeg[i] = 0;\n\t for (j = 0, slen = vlist.length; j < slen; j++) {\n\t val = svals[j] = chartRangeClip ? clipval(vlist[j], clipMin, clipMax) : vlist[j];\n\t if (val !== null) {\n\t if (val > 0) {\n\t stackTotals[i] += val;\n\t }\n\t if (stackMin < 0 && stackMax > 0) {\n\t if (val < 0) {\n\t stackRangesNeg[i] += Math.abs(val);\n\t } else {\n\t stackRanges[i] += val;\n\t }\n\t } else {\n\t stackRanges[i] += Math.abs(val - (val < 0 ? stackMax : stackMin));\n\t }\n\t numValues.push(val);\n\t }\n\t }\n\t } else {\n\t val = chartRangeClip ? clipval(values[i], clipMin, clipMax) : values[i];\n\t val = values[i] = normalizeValue(val);\n\t if (val !== null) {\n\t numValues.push(val);\n\t }\n\t }\n\t }\n\t this.max = max = Math.max.apply(Math, numValues);\n\t this.min = min = Math.min.apply(Math, numValues);\n\t this.stackMax = stackMax = stacked ? Math.max.apply(Math, stackTotals) : max;\n\t this.stackMin = stackMin = stacked ? 
Math.min.apply(Math, numValues) : min;\n\t\n\t if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < min)) {\n\t min = options.get('chartRangeMin');\n\t }\n\t if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > max)) {\n\t max = options.get('chartRangeMax');\n\t }\n\t\n\t this.zeroAxis = zeroAxis = options.get('zeroAxis', true);\n\t if (min <= 0 && max >= 0 && zeroAxis) {\n\t xaxisOffset = 0;\n\t } else if (zeroAxis == false) {\n\t xaxisOffset = min;\n\t } else if (min > 0) {\n\t xaxisOffset = min;\n\t } else {\n\t xaxisOffset = max;\n\t }\n\t this.xaxisOffset = xaxisOffset;\n\t\n\t range = stacked ? (Math.max.apply(Math, stackRanges) + Math.max.apply(Math, stackRangesNeg)) : max - min;\n\t\n\t // as we plot zero/min values a single pixel line, we add a pixel to all other\n\t // values - Reduce the effective canvas size to suit\n\t this.canvasHeightEf = (zeroAxis && min < 0) ? this.canvasHeight - 2 : this.canvasHeight - 1;\n\t\n\t if (min < xaxisOffset) {\n\t yMaxCalc = (stacked && max >= 0) ? stackMax : max;\n\t yoffset = (yMaxCalc - xaxisOffset) / range * this.canvasHeight;\n\t if (yoffset !== Math.ceil(yoffset)) {\n\t this.canvasHeightEf -= 2;\n\t yoffset = Math.ceil(yoffset);\n\t }\n\t } else {\n\t yoffset = this.canvasHeight;\n\t }\n\t this.yoffset = yoffset;\n\t\n\t if ($.isArray(options.get('colorMap'))) {\n\t this.colorMapByIndex = options.get('colorMap');\n\t this.colorMapByValue = null;\n\t } else {\n\t this.colorMapByIndex = null;\n\t this.colorMapByValue = options.get('colorMap');\n\t if (this.colorMapByValue && this.colorMapByValue.get === undefined) {\n\t this.colorMapByValue = new RangeMap(this.colorMapByValue);\n\t }\n\t }\n\t\n\t this.range = range;\n\t },\n\t\n\t getRegion: function (el, x, y) {\n\t var result = Math.floor(x / this.totalBarWidth);\n\t return (result < 0 || result >= this.values.length) ? undefined : result;\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var currentRegion = this.currentRegion,\n\t values = ensureArray(this.values[currentRegion]),\n\t result = [],\n\t value, i;\n\t for (i = values.length; i--;) {\n\t value = values[i];\n\t result.push({\n\t isNull: value === null,\n\t value: value,\n\t color: this.calcColor(i, value, currentRegion),\n\t offset: currentRegion\n\t });\n\t }\n\t return result;\n\t },\n\t\n\t calcColor: function (stacknum, value, valuenum) {\n\t var colorMapByIndex = this.colorMapByIndex,\n\t colorMapByValue = this.colorMapByValue,\n\t options = this.options,\n\t color, newColor;\n\t if (this.stacked) {\n\t color = options.get('stackedBarColor');\n\t } else {\n\t color = (value < 0) ? options.get('negBarColor') : options.get('barColor');\n\t }\n\t if (value === 0 && options.get('zeroColor') !== undefined) {\n\t color = options.get('zeroColor');\n\t }\n\t if (colorMapByValue && (newColor = colorMapByValue.get(value))) {\n\t color = newColor;\n\t } else if (colorMapByIndex && colorMapByIndex.length > valuenum) {\n\t color = colorMapByIndex[valuenum];\n\t }\n\t return $.isArray(color) ? 
color[stacknum % color.length] : color;\n\t },\n\t\n\t /**\n\t * Render bar(s) for a region\n\t */\n\t renderRegion: function (valuenum, highlight) {\n\t var vals = this.values[valuenum],\n\t options = this.options,\n\t xaxisOffset = this.xaxisOffset,\n\t result = [],\n\t range = this.range,\n\t stacked = this.stacked,\n\t target = this.target,\n\t x = valuenum * this.totalBarWidth,\n\t canvasHeightEf = this.canvasHeightEf,\n\t yoffset = this.yoffset,\n\t y, height, color, isNull, yoffsetNeg, i, valcount, val, minPlotted, allMin;\n\t\n\t vals = $.isArray(vals) ? vals : [vals];\n\t valcount = vals.length;\n\t val = vals[0];\n\t isNull = all(null, vals);\n\t allMin = all(xaxisOffset, vals, true);\n\t\n\t if (isNull) {\n\t if (options.get('nullColor')) {\n\t color = highlight ? options.get('nullColor') : this.calcHighlightColor(options.get('nullColor'), options);\n\t y = (yoffset > 0) ? yoffset - 1 : yoffset;\n\t return target.drawRect(x, y, this.barWidth - 1, 0, color, color);\n\t } else {\n\t return undefined;\n\t }\n\t }\n\t yoffsetNeg = yoffset;\n\t for (i = 0; i < valcount; i++) {\n\t val = vals[i];\n\t\n\t if (stacked && val === xaxisOffset) {\n\t if (!allMin || minPlotted) {\n\t continue;\n\t }\n\t minPlotted = true;\n\t }\n\t\n\t if (range > 0) {\n\t height = Math.floor(canvasHeightEf * ((Math.abs(val - xaxisOffset) / range))) + 1;\n\t } else {\n\t height = 1;\n\t }\n\t if (val < xaxisOffset || (val === xaxisOffset && yoffset === 0)) {\n\t y = yoffsetNeg;\n\t yoffsetNeg += height;\n\t } else {\n\t y = yoffset - height;\n\t yoffset -= height;\n\t }\n\t color = this.calcColor(i, val, valuenum);\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, options);\n\t }\n\t result.push(target.drawRect(x, y, this.barWidth - 1, height - 1, color, color));\n\t }\n\t if (result.length === 1) {\n\t return result[0];\n\t }\n\t return result;\n\t }\n\t });\n\t\n\t /**\n\t * Tristate charts\n\t */\n\t $.fn.sparkline.tristate = tristate = createClass($.fn.sparkline._base, barHighlightMixin, {\n\t type: 'tristate',\n\t\n\t init: function (el, values, options, width, height) {\n\t var barWidth = parseInt(options.get('barWidth'), 10),\n\t barSpacing = parseInt(options.get('barSpacing'), 10);\n\t tristate._super.init.call(this, el, values, options, width, height);\n\t\n\t this.regionShapes = {};\n\t this.barWidth = barWidth;\n\t this.barSpacing = barSpacing;\n\t this.totalBarWidth = barWidth + barSpacing;\n\t this.values = $.map(values, Number);\n\t this.width = width = (values.length * barWidth) + ((values.length - 1) * barSpacing);\n\t\n\t if ($.isArray(options.get('colorMap'))) {\n\t this.colorMapByIndex = options.get('colorMap');\n\t this.colorMapByValue = null;\n\t } else {\n\t this.colorMapByIndex = null;\n\t this.colorMapByValue = options.get('colorMap');\n\t if (this.colorMapByValue && this.colorMapByValue.get === undefined) {\n\t this.colorMapByValue = new RangeMap(this.colorMapByValue);\n\t }\n\t }\n\t this.initTarget();\n\t },\n\t\n\t getRegion: function (el, x, y) {\n\t return Math.floor(x / this.totalBarWidth);\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var currentRegion = this.currentRegion;\n\t return {\n\t isNull: this.values[currentRegion] === undefined,\n\t value: this.values[currentRegion],\n\t color: this.calcColor(this.values[currentRegion], currentRegion),\n\t offset: currentRegion\n\t };\n\t },\n\t\n\t calcColor: function (value, valuenum) {\n\t var values = this.values,\n\t options = this.options,\n\t colorMapByIndex = this.colorMapByIndex,\n\t 
colorMapByValue = this.colorMapByValue,\n\t color, newColor;\n\t\n\t if (colorMapByValue && (newColor = colorMapByValue.get(value))) {\n\t color = newColor;\n\t } else if (colorMapByIndex && colorMapByIndex.length > valuenum) {\n\t color = colorMapByIndex[valuenum];\n\t } else if (values[valuenum] < 0) {\n\t color = options.get('negBarColor');\n\t } else if (values[valuenum] > 0) {\n\t color = options.get('posBarColor');\n\t } else {\n\t color = options.get('zeroBarColor');\n\t }\n\t return color;\n\t },\n\t\n\t renderRegion: function (valuenum, highlight) {\n\t var values = this.values,\n\t options = this.options,\n\t target = this.target,\n\t canvasHeight, height, halfHeight,\n\t x, y, color;\n\t\n\t canvasHeight = target.pixelHeight;\n\t halfHeight = Math.round(canvasHeight / 2);\n\t\n\t x = valuenum * this.totalBarWidth;\n\t if (values[valuenum] < 0) {\n\t y = halfHeight;\n\t height = halfHeight - 1;\n\t } else if (values[valuenum] > 0) {\n\t y = 0;\n\t height = halfHeight - 1;\n\t } else {\n\t y = halfHeight - 1;\n\t height = 2;\n\t }\n\t color = this.calcColor(values[valuenum], valuenum);\n\t if (color === null) {\n\t return;\n\t }\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, options);\n\t }\n\t return target.drawRect(x, y, this.barWidth - 1, height - 1, color, color);\n\t }\n\t });\n\t\n\t /**\n\t * Discrete charts\n\t */\n\t $.fn.sparkline.discrete = discrete = createClass($.fn.sparkline._base, barHighlightMixin, {\n\t type: 'discrete',\n\t\n\t init: function (el, values, options, width, height) {\n\t discrete._super.init.call(this, el, values, options, width, height);\n\t\n\t this.regionShapes = {};\n\t this.values = values = $.map(values, Number);\n\t this.min = Math.min.apply(Math, values);\n\t this.max = Math.max.apply(Math, values);\n\t this.range = this.max - this.min;\n\t this.width = width = options.get('width') === 'auto' ? values.length * 2 : this.width;\n\t this.interval = Math.floor(width / values.length);\n\t this.itemWidth = width / values.length;\n\t if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < this.min)) {\n\t this.min = options.get('chartRangeMin');\n\t }\n\t if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > this.max)) {\n\t this.max = options.get('chartRangeMax');\n\t }\n\t this.initTarget();\n\t if (this.target) {\n\t this.lineHeight = options.get('lineHeight') === 'auto' ? Math.round(this.canvasHeight * 0.3) : options.get('lineHeight');\n\t }\n\t },\n\t\n\t getRegion: function (el, x, y) {\n\t return Math.floor(x / this.itemWidth);\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var currentRegion = this.currentRegion;\n\t return {\n\t isNull: this.values[currentRegion] === undefined,\n\t value: this.values[currentRegion],\n\t offset: currentRegion\n\t };\n\t },\n\t\n\t renderRegion: function (valuenum, highlight) {\n\t var values = this.values,\n\t options = this.options,\n\t min = this.min,\n\t max = this.max,\n\t range = this.range,\n\t interval = this.interval,\n\t target = this.target,\n\t canvasHeight = this.canvasHeight,\n\t lineHeight = this.lineHeight,\n\t pheight = canvasHeight - lineHeight,\n\t ytop, val, color, x;\n\t\n\t val = clipval(values[valuenum], min, max);\n\t x = valuenum * interval;\n\t ytop = Math.round(pheight - pheight * ((val - min) / range));\n\t color = (options.get('thresholdColor') && val < options.get('thresholdValue')) ? 
options.get('thresholdColor') : options.get('lineColor');\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, options);\n\t }\n\t return target.drawLine(x, ytop, x, ytop + lineHeight, color);\n\t }\n\t });\n\t\n\t /**\n\t * Bullet charts\n\t */\n\t $.fn.sparkline.bullet = bullet = createClass($.fn.sparkline._base, {\n\t type: 'bullet',\n\t\n\t init: function (el, values, options, width, height) {\n\t var min, max, vals;\n\t bullet._super.init.call(this, el, values, options, width, height);\n\t\n\t // values: target, performance, range1, range2, range3\n\t this.values = values = normalizeValues(values);\n\t // target or performance could be null\n\t vals = values.slice();\n\t vals[0] = vals[0] === null ? vals[2] : vals[0];\n\t vals[1] = values[1] === null ? vals[2] : vals[1];\n\t min = Math.min.apply(Math, values);\n\t max = Math.max.apply(Math, values);\n\t if (options.get('base') === undefined) {\n\t min = min < 0 ? min : 0;\n\t } else {\n\t min = options.get('base');\n\t }\n\t this.min = min;\n\t this.max = max;\n\t this.range = max - min;\n\t this.shapes = {};\n\t this.valueShapes = {};\n\t this.regiondata = {};\n\t this.width = width = options.get('width') === 'auto' ? '4.0em' : width;\n\t this.target = this.$el.simpledraw(width, height, options.get('composite'));\n\t if (!values.length) {\n\t this.disabled = true;\n\t }\n\t this.initTarget();\n\t },\n\t\n\t getRegion: function (el, x, y) {\n\t var shapeid = this.target.getShapeAt(el, x, y);\n\t return (shapeid !== undefined && this.shapes[shapeid] !== undefined) ? this.shapes[shapeid] : undefined;\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var currentRegion = this.currentRegion;\n\t return {\n\t fieldkey: currentRegion.substr(0, 1),\n\t value: this.values[currentRegion.substr(1)],\n\t region: currentRegion\n\t };\n\t },\n\t\n\t changeHighlight: function (highlight) {\n\t var currentRegion = this.currentRegion,\n\t shapeid = this.valueShapes[currentRegion],\n\t shape;\n\t delete this.shapes[shapeid];\n\t switch (currentRegion.substr(0, 1)) {\n\t case 'r':\n\t shape = this.renderRange(currentRegion.substr(1), highlight);\n\t break;\n\t case 'p':\n\t shape = this.renderPerformance(highlight);\n\t break;\n\t case 't':\n\t shape = this.renderTarget(highlight);\n\t break;\n\t }\n\t this.valueShapes[currentRegion] = shape.id;\n\t this.shapes[shape.id] = currentRegion;\n\t this.target.replaceWithShape(shapeid, shape);\n\t },\n\t\n\t renderRange: function (rn, highlight) {\n\t var rangeval = this.values[rn],\n\t rangewidth = Math.round(this.canvasWidth * ((rangeval - this.min) / this.range)),\n\t color = this.options.get('rangeColors')[rn - 2];\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, this.options);\n\t }\n\t return this.target.drawRect(0, 0, rangewidth - 1, this.canvasHeight - 1, color, color);\n\t },\n\t\n\t renderPerformance: function (highlight) {\n\t var perfval = this.values[1],\n\t perfwidth = Math.round(this.canvasWidth * ((perfval - this.min) / this.range)),\n\t color = this.options.get('performanceColor');\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, this.options);\n\t }\n\t return this.target.drawRect(0, Math.round(this.canvasHeight * 0.3), perfwidth - 1,\n\t Math.round(this.canvasHeight * 0.4) - 1, color, color);\n\t },\n\t\n\t renderTarget: function (highlight) {\n\t var targetval = this.values[0],\n\t x = Math.round(this.canvasWidth * ((targetval - this.min) / this.range) - (this.options.get('targetWidth') / 2)),\n\t targettop = Math.round(this.canvasHeight * 
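/* Illustrative bullet usage (hypothetical selector and values): per the
   comment in init above, the value order is target, performance, then
   one or more ranges; renderTarget draws the target as a thin vertical
   bar inset 10% from the top and bottom of the canvas.
     $('.bullet').sparkline([10, 12, 12, 9, 7], { type: 'bullet' });
*/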
0.10),\n\t targetheight = this.canvasHeight - (targettop * 2),\n\t color = this.options.get('targetColor');\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, this.options);\n\t }\n\t return this.target.drawRect(x, targettop, this.options.get('targetWidth') - 1, targetheight - 1, color, color);\n\t },\n\t\n\t render: function () {\n\t var vlen = this.values.length,\n\t target = this.target,\n\t i, shape;\n\t if (!bullet._super.render.call(this)) {\n\t return;\n\t }\n\t for (i = 2; i < vlen; i++) {\n\t shape = this.renderRange(i).append();\n\t this.shapes[shape.id] = 'r' + i;\n\t this.valueShapes['r' + i] = shape.id;\n\t }\n\t if (this.values[1] !== null) {\n\t shape = this.renderPerformance().append();\n\t this.shapes[shape.id] = 'p1';\n\t this.valueShapes.p1 = shape.id;\n\t }\n\t if (this.values[0] !== null) {\n\t shape = this.renderTarget().append();\n\t this.shapes[shape.id] = 't0';\n\t this.valueShapes.t0 = shape.id;\n\t }\n\t target.render();\n\t }\n\t });\n\t\n\t /**\n\t * Pie charts\n\t */\n\t $.fn.sparkline.pie = pie = createClass($.fn.sparkline._base, {\n\t type: 'pie',\n\t\n\t init: function (el, values, options, width, height) {\n\t var total = 0, i;\n\t\n\t pie._super.init.call(this, el, values, options, width, height);\n\t\n\t this.shapes = {}; // map shape ids to value offsets\n\t this.valueShapes = {}; // maps value offsets to shape ids\n\t this.values = values = $.map(values, Number);\n\t\n\t if (options.get('width') === 'auto') {\n\t this.width = this.height;\n\t }\n\t\n\t if (values.length > 0) {\n\t for (i = values.length; i--;) {\n\t total += values[i];\n\t }\n\t }\n\t this.total = total;\n\t this.initTarget();\n\t this.radius = Math.floor(Math.min(this.canvasWidth, this.canvasHeight) / 2);\n\t },\n\t\n\t getRegion: function (el, x, y) {\n\t var shapeid = this.target.getShapeAt(el, x, y);\n\t return (shapeid !== undefined && this.shapes[shapeid] !== undefined) ? this.shapes[shapeid] : undefined;\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var currentRegion = this.currentRegion;\n\t return {\n\t isNull: this.values[currentRegion] === undefined,\n\t value: this.values[currentRegion],\n\t percent: this.values[currentRegion] / this.total * 100,\n\t color: this.options.get('sliceColors')[currentRegion % this.options.get('sliceColors').length],\n\t offset: currentRegion\n\t };\n\t },\n\t\n\t changeHighlight: function (highlight) {\n\t var currentRegion = this.currentRegion,\n\t newslice = this.renderSlice(currentRegion, highlight),\n\t shapeid = this.valueShapes[currentRegion];\n\t delete this.shapes[shapeid];\n\t this.target.replaceWithShape(shapeid, newslice);\n\t this.valueShapes[currentRegion] = newslice.id;\n\t this.shapes[newslice.id] = currentRegion;\n\t },\n\t\n\t renderSlice: function (valuenum, highlight) {\n\t var target = this.target,\n\t options = this.options,\n\t radius = this.radius,\n\t borderWidth = options.get('borderWidth'),\n\t offset = options.get('offset'),\n\t circle = 2 * Math.PI,\n\t values = this.values,\n\t total = this.total,\n\t next = offset ? 
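/* Illustrative pie usage (hypothetical selector and values): slice arcs
   are proportional to value/total, and offset rotates the first slice
   by that many degrees, converted to radians immediately after this
   comment.
     $('.pie').sparkline([1, 1, 2], {
         type: 'pie',
         offset: 90,
         sliceColors: ['#3366cc', '#dc3912', '#ff9900']
     });
*/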
(2*Math.PI)*(offset/360) : 0,\n\t start, end, i, vlen, color;\n\t\n\t vlen = values.length;\n\t for (i = 0; i < vlen; i++) {\n\t start = next;\n\t end = next;\n\t if (total > 0) { // avoid divide by zero\n\t end = next + (circle * (values[i] / total));\n\t }\n\t if (valuenum === i) {\n\t color = options.get('sliceColors')[i % options.get('sliceColors').length];\n\t if (highlight) {\n\t color = this.calcHighlightColor(color, options);\n\t }\n\t\n\t return target.drawPieSlice(radius, radius, radius - borderWidth, start, end, undefined, color);\n\t }\n\t next = end;\n\t }\n\t },\n\t\n\t render: function () {\n\t var target = this.target,\n\t values = this.values,\n\t options = this.options,\n\t radius = this.radius,\n\t borderWidth = options.get('borderWidth'),\n\t shape, i;\n\t\n\t if (!pie._super.render.call(this)) {\n\t return;\n\t }\n\t if (borderWidth) {\n\t target.drawCircle(radius, radius, Math.floor(radius - (borderWidth / 2)),\n\t options.get('borderColor'), undefined, borderWidth).append();\n\t }\n\t for (i = values.length; i--;) {\n\t if (values[i]) { // don't render zero values\n\t shape = this.renderSlice(i).append();\n\t this.valueShapes[i] = shape.id; // store just the shapeid\n\t this.shapes[shape.id] = i;\n\t }\n\t }\n\t target.render();\n\t }\n\t });\n\t\n\t /**\n\t * Box plots\n\t */\n\t $.fn.sparkline.box = box = createClass($.fn.sparkline._base, {\n\t type: 'box',\n\t\n\t init: function (el, values, options, width, height) {\n\t box._super.init.call(this, el, values, options, width, height);\n\t this.values = $.map(values, Number);\n\t this.width = options.get('width') === 'auto' ? '4.0em' : width;\n\t this.initTarget();\n\t if (!this.values.length) {\n\t this.disabled = 1;\n\t }\n\t },\n\t\n\t /**\n\t * Simulate a single region\n\t */\n\t getRegion: function () {\n\t return 1;\n\t },\n\t\n\t getCurrentRegionFields: function () {\n\t var result = [\n\t { field: 'lq', value: this.quartiles[0] },\n\t { field: 'med', value: this.quartiles[1] },\n\t { field: 'uq', value: this.quartiles[2] }\n\t ];\n\t if (this.loutlier !== undefined) {\n\t result.push({ field: 'lo', value: this.loutlier});\n\t }\n\t if (this.routlier !== undefined) {\n\t result.push({ field: 'ro', value: this.routlier});\n\t }\n\t if (this.lwhisker !== undefined) {\n\t result.push({ field: 'lw', value: this.lwhisker});\n\t }\n\t if (this.rwhisker !== undefined) {\n\t result.push({ field: 'rw', value: this.rwhisker});\n\t }\n\t return result;\n\t },\n\t\n\t render: function () {\n\t var target = this.target,\n\t values = this.values,\n\t vlen = values.length,\n\t options = this.options,\n\t canvasWidth = this.canvasWidth,\n\t canvasHeight = this.canvasHeight,\n\t minValue = options.get('chartRangeMin') === undefined ? Math.min.apply(Math, values) : options.get('chartRangeMin'),\n\t maxValue = options.get('chartRangeMax') === undefined ? 
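/* Illustrative box plot usage (hypothetical selector and values): by
   default the quartiles and whiskers are computed from the sorted data;
   raw: true instead reads precomputed values, and showOutliers plots
   points lying beyond outlierIQR times the interquartile range.
     $('.box').sparkline([4, 27, 34, 52, 54, 59, 61, 68, 78, 82, 85], {
         type: 'box',
         showOutliers: true
     });
*/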
Math.max.apply(Math, values) : options.get('chartRangeMax'),\n\t canvasLeft = 0,\n\t lwhisker, loutlier, iqr, q1, q2, q3, rwhisker, routlier, i,\n\t size, unitSize;\n\t\n\t if (!box._super.render.call(this)) {\n\t return;\n\t }\n\t\n\t if (options.get('raw')) {\n\t if (options.get('showOutliers') && values.length > 5) {\n\t loutlier = values[0];\n\t lwhisker = values[1];\n\t q1 = values[2];\n\t q2 = values[3];\n\t q3 = values[4];\n\t rwhisker = values[5];\n\t routlier = values[6];\n\t } else {\n\t lwhisker = values[0];\n\t q1 = values[1];\n\t q2 = values[2];\n\t q3 = values[3];\n\t rwhisker = values[4];\n\t }\n\t } else {\n\t values.sort(function (a, b) { return a - b; });\n\t q1 = quartile(values, 1);\n\t q2 = quartile(values, 2);\n\t q3 = quartile(values, 3);\n\t iqr = q3 - q1;\n\t if (options.get('showOutliers')) {\n\t lwhisker = rwhisker = undefined;\n\t for (i = 0; i < vlen; i++) {\n\t if (lwhisker === undefined && values[i] > q1 - (iqr * options.get('outlierIQR'))) {\n\t lwhisker = values[i];\n\t }\n\t if (values[i] < q3 + (iqr * options.get('outlierIQR'))) {\n\t rwhisker = values[i];\n\t }\n\t }\n\t loutlier = values[0];\n\t routlier = values[vlen - 1];\n\t } else {\n\t lwhisker = values[0];\n\t rwhisker = values[vlen - 1];\n\t }\n\t }\n\t this.quartiles = [q1, q2, q3];\n\t this.lwhisker = lwhisker;\n\t this.rwhisker = rwhisker;\n\t this.loutlier = loutlier;\n\t this.routlier = routlier;\n\t\n\t unitSize = canvasWidth / (maxValue - minValue + 1);\n\t if (options.get('showOutliers')) {\n\t canvasLeft = Math.ceil(options.get('spotRadius'));\n\t canvasWidth -= 2 * Math.ceil(options.get('spotRadius'));\n\t unitSize = canvasWidth / (maxValue - minValue + 1);\n\t if (loutlier < lwhisker) {\n\t target.drawCircle((loutlier - minValue) * unitSize + canvasLeft,\n\t canvasHeight / 2,\n\t options.get('spotRadius'),\n\t options.get('outlierLineColor'),\n\t options.get('outlierFillColor')).append();\n\t }\n\t if (routlier > rwhisker) {\n\t target.drawCircle((routlier - minValue) * unitSize + canvasLeft,\n\t canvasHeight / 2,\n\t options.get('spotRadius'),\n\t options.get('outlierLineColor'),\n\t options.get('outlierFillColor')).append();\n\t }\n\t }\n\t\n\t // box\n\t target.drawRect(\n\t Math.round((q1 - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight * 0.1),\n\t Math.round((q3 - q1) * unitSize),\n\t Math.round(canvasHeight * 0.8),\n\t options.get('boxLineColor'),\n\t options.get('boxFillColor')).append();\n\t // left whisker\n\t target.drawLine(\n\t Math.round((lwhisker - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight / 2),\n\t Math.round((q1 - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight / 2),\n\t options.get('lineColor')).append();\n\t target.drawLine(\n\t Math.round((lwhisker - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight / 4),\n\t Math.round((lwhisker - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight - canvasHeight / 4),\n\t options.get('whiskerColor')).append();\n\t // right whisker\n\t target.drawLine(Math.round((rwhisker - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight / 2),\n\t Math.round((q3 - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight / 2),\n\t options.get('lineColor')).append();\n\t target.drawLine(\n\t Math.round((rwhisker - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight / 4),\n\t Math.round((rwhisker - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight - canvasHeight / 4),\n\t options.get('whiskerColor')).append();\n\t // median 
line\n\t target.drawLine(\n\t Math.round((q2 - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight * 0.1),\n\t Math.round((q2 - minValue) * unitSize + canvasLeft),\n\t Math.round(canvasHeight * 0.9),\n\t options.get('medianColor')).append();\n\t if (options.get('target')) {\n\t size = Math.ceil(options.get('spotRadius'));\n\t target.drawLine(\n\t Math.round((options.get('target') - minValue) * unitSize + canvasLeft),\n\t Math.round((canvasHeight / 2) - size),\n\t Math.round((options.get('target') - minValue) * unitSize + canvasLeft),\n\t Math.round((canvasHeight / 2) + size),\n\t options.get('targetColor')).append();\n\t target.drawLine(\n\t Math.round((options.get('target') - minValue) * unitSize + canvasLeft - size),\n\t Math.round(canvasHeight / 2),\n\t Math.round((options.get('target') - minValue) * unitSize + canvasLeft + size),\n\t Math.round(canvasHeight / 2),\n\t options.get('targetColor')).append();\n\t }\n\t target.render();\n\t }\n\t });\n\t\n\t // Setup a very simple \"virtual canvas\" to make drawing the few shapes we need easier\n\t // This is accessible as $(foo).simpledraw()\n\t\n\t VShape = createClass({\n\t init: function (target, id, type, args) {\n\t this.target = target;\n\t this.id = id;\n\t this.type = type;\n\t this.args = args;\n\t },\n\t append: function () {\n\t this.target.appendShape(this);\n\t return this;\n\t }\n\t });\n\t\n\t VCanvas_base = createClass({\n\t _pxregex: /(\\d+)(px)?\\s*$/i,\n\t\n\t init: function (width, height, target) {\n\t if (!width) {\n\t return;\n\t }\n\t this.width = width;\n\t this.height = height;\n\t this.target = target;\n\t this.lastShapeId = null;\n\t if (target[0]) {\n\t target = target[0];\n\t }\n\t $.data(target, '_jqs_vcanvas', this);\n\t },\n\t\n\t drawLine: function (x1, y1, x2, y2, lineColor, lineWidth) {\n\t return this.drawShape([[x1, y1], [x2, y2]], lineColor, lineWidth);\n\t },\n\t\n\t drawShape: function (path, lineColor, fillColor, lineWidth) {\n\t return this._genShape('Shape', [path, lineColor, fillColor, lineWidth]);\n\t },\n\t\n\t drawCircle: function (x, y, radius, lineColor, fillColor, lineWidth) {\n\t return this._genShape('Circle', [x, y, radius, lineColor, fillColor, lineWidth]);\n\t },\n\t\n\t drawPieSlice: function (x, y, radius, startAngle, endAngle, lineColor, fillColor) {\n\t return this._genShape('PieSlice', [x, y, radius, startAngle, endAngle, lineColor, fillColor]);\n\t },\n\t\n\t drawRect: function (x, y, width, height, lineColor, fillColor) {\n\t return this._genShape('Rect', [x, y, width, height, lineColor, fillColor]);\n\t },\n\t\n\t getElement: function () {\n\t return this.canvas;\n\t },\n\t\n\t /**\n\t * Return the most recently inserted shape id\n\t */\n\t getLastShapeId: function () {\n\t return this.lastShapeId;\n\t },\n\t\n\t /**\n\t * Clear and reset the canvas\n\t */\n\t reset: function () {\n\t alert('reset not implemented');\n\t },\n\t\n\t _insert: function (el, target) {\n\t $(target).html(el);\n\t },\n\t\n\t /**\n\t * Calculate the pixel dimensions of the canvas\n\t */\n\t _calculatePixelDims: function (width, height, canvas) {\n\t // XXX This should probably be a configurable option\n\t var match;\n\t match = this._pxregex.exec(height);\n\t if (match) {\n\t this.pixelHeight = match[1];\n\t } else {\n\t this.pixelHeight = $(canvas).height();\n\t }\n\t match = this._pxregex.exec(width);\n\t if (match) {\n\t this.pixelWidth = match[1];\n\t } else {\n\t this.pixelWidth = $(canvas).width();\n\t }\n\t },\n\t\n\t /**\n\t * Generate a shape object and id for later rendering\n\t 
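* Drawing is deferred: drawLine/drawRect/drawCircle only construct a
* VShape here; nothing is painted until the shape's append() queues it
* and the canvas render() replays the queue. For example (hypothetical
* values), target.drawRect(0, 0, 9, 9, '#f00', '#f00').append() queues
* a square that appears on the next target.render() call.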
*/\n\t _genShape: function (shapetype, shapeargs) {\n\t var id = shapeCount++;\n\t shapeargs.unshift(id);\n\t return new VShape(this, id, shapetype, shapeargs);\n\t },\n\t\n\t /**\n\t * Add a shape to the end of the render queue\n\t */\n\t appendShape: function (shape) {\n\t alert('appendShape not implemented');\n\t },\n\t\n\t /**\n\t * Replace one shape with another\n\t */\n\t replaceWithShape: function (shapeid, shape) {\n\t alert('replaceWithShape not implemented');\n\t },\n\t\n\t /**\n\t * Insert one shape after another in the render queue\n\t */\n\t insertAfterShape: function (shapeid, shape) {\n\t alert('insertAfterShape not implemented');\n\t },\n\t\n\t /**\n\t * Remove a shape from the queue\n\t */\n\t removeShapeId: function (shapeid) {\n\t alert('removeShapeId not implemented');\n\t },\n\t\n\t /**\n\t * Find a shape at the specified x/y co-ordinates\n\t */\n\t getShapeAt: function (el, x, y) {\n\t alert('getShapeAt not implemented');\n\t },\n\t\n\t /**\n\t * Render all queued shapes onto the canvas\n\t */\n\t render: function () {\n\t alert('render not implemented');\n\t }\n\t });\n\t\n\t VCanvas_canvas = createClass(VCanvas_base, {\n\t init: function (width, height, target, interact) {\n\t VCanvas_canvas._super.init.call(this, width, height, target);\n\t this.canvas = document.createElement('canvas');\n\t if (target[0]) {\n\t target = target[0];\n\t }\n\t $.data(target, '_jqs_vcanvas', this);\n\t $(this.canvas).css({ display: 'inline-block', width: width, height: height, verticalAlign: 'top' });\n\t this._insert(this.canvas, target);\n\t this._calculatePixelDims(width, height, this.canvas);\n\t this.canvas.width = this.pixelWidth;\n\t this.canvas.height = this.pixelHeight;\n\t this.interact = interact;\n\t this.shapes = {};\n\t this.shapeseq = [];\n\t this.currentTargetShapeId = undefined;\n\t $(this.canvas).css({width: this.pixelWidth, height: this.pixelHeight});\n\t },\n\t\n\t _getContext: function (lineColor, fillColor, lineWidth) {\n\t var context = this.canvas.getContext('2d');\n\t if (lineColor !== undefined) {\n\t context.strokeStyle = lineColor;\n\t }\n\t context.lineWidth = lineWidth === undefined ? 
1 : lineWidth;\n\t if (fillColor !== undefined) {\n\t context.fillStyle = fillColor;\n\t }\n\t return context;\n\t },\n\t\n\t reset: function () {\n\t var context = this._getContext();\n\t context.clearRect(0, 0, this.pixelWidth, this.pixelHeight);\n\t this.shapes = {};\n\t this.shapeseq = [];\n\t this.currentTargetShapeId = undefined;\n\t },\n\t\n\t _drawShape: function (shapeid, path, lineColor, fillColor, lineWidth) {\n\t var context = this._getContext(lineColor, fillColor, lineWidth),\n\t i, plen;\n\t context.beginPath();\n\t context.moveTo(path[0][0] + 0.5, path[0][1] + 0.5);\n\t for (i = 1, plen = path.length; i < plen; i++) {\n\t context.lineTo(path[i][0] + 0.5, path[i][1] + 0.5); // the 0.5 offset gives us crisp pixel-width lines\n\t }\n\t if (lineColor !== undefined) {\n\t context.stroke();\n\t }\n\t if (fillColor !== undefined) {\n\t context.fill();\n\t }\n\t if (this.targetX !== undefined && this.targetY !== undefined &&\n\t context.isPointInPath(this.targetX, this.targetY)) {\n\t this.currentTargetShapeId = shapeid;\n\t }\n\t },\n\t\n\t _drawCircle: function (shapeid, x, y, radius, lineColor, fillColor, lineWidth) {\n\t var context = this._getContext(lineColor, fillColor, lineWidth);\n\t context.beginPath();\n\t context.arc(x, y, radius, 0, 2 * Math.PI, false);\n\t if (this.targetX !== undefined && this.targetY !== undefined &&\n\t context.isPointInPath(this.targetX, this.targetY)) {\n\t this.currentTargetShapeId = shapeid;\n\t }\n\t if (lineColor !== undefined) {\n\t context.stroke();\n\t }\n\t if (fillColor !== undefined) {\n\t context.fill();\n\t }\n\t },\n\t\n\t _drawPieSlice: function (shapeid, x, y, radius, startAngle, endAngle, lineColor, fillColor) {\n\t var context = this._getContext(lineColor, fillColor);\n\t context.beginPath();\n\t context.moveTo(x, y);\n\t context.arc(x, y, radius, startAngle, endAngle, false);\n\t context.lineTo(x, y);\n\t context.closePath();\n\t if (lineColor !== undefined) {\n\t context.stroke();\n\t }\n\t if (fillColor) {\n\t context.fill();\n\t }\n\t if (this.targetX !== undefined && this.targetY !== undefined &&\n\t context.isPointInPath(this.targetX, this.targetY)) {\n\t this.currentTargetShapeId = shapeid;\n\t }\n\t },\n\t\n\t _drawRect: function (shapeid, x, y, width, height, lineColor, fillColor) {\n\t return this._drawShape(shapeid, [[x, y], [x + width, y], [x + width, y + height], [x, y + height], [x, y]], lineColor, fillColor);\n\t },\n\t\n\t appendShape: function (shape) {\n\t this.shapes[shape.id] = shape;\n\t this.shapeseq.push(shape.id);\n\t this.lastShapeId = shape.id;\n\t return shape.id;\n\t },\n\t\n\t replaceWithShape: function (shapeid, shape) {\n\t var shapeseq = this.shapeseq,\n\t i;\n\t this.shapes[shape.id] = shape;\n\t for (i = shapeseq.length; i--;) {\n\t if (shapeseq[i] == shapeid) {\n\t shapeseq[i] = shape.id;\n\t }\n\t }\n\t delete this.shapes[shapeid];\n\t },\n\t\n\t replaceWithShapes: function (shapeids, shapes) {\n\t var shapeseq = this.shapeseq,\n\t shapemap = {},\n\t sid, i, first;\n\t\n\t for (i = shapeids.length; i--;) {\n\t shapemap[shapeids[i]] = true;\n\t }\n\t for (i = shapeseq.length; i--;) {\n\t sid = shapeseq[i];\n\t if (shapemap[sid]) {\n\t shapeseq.splice(i, 1);\n\t delete this.shapes[sid];\n\t first = i;\n\t }\n\t }\n\t for (i = shapes.length; i--;) {\n\t shapeseq.splice(first, 0, shapes[i].id);\n\t this.shapes[shapes[i].id] = shapes[i];\n\t }\n\t\n\t },\n\t\n\t insertAfterShape: function (shapeid, shape) {\n\t var shapeseq = this.shapeseq,\n\t i;\n\t for (i = shapeseq.length; i--;) {\n\t if 
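/* The canvas backend keeps an ordered render queue (shapeseq) with the
   shape objects keyed by id in this.shapes; splicing that queue, as the
   surrounding methods do, changes stacking order on the next render().
   Hit testing (getShapeAt) re-renders with targetX/targetY set so that
   isPointInPath can record which shape covers the point. */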
(shapeseq[i] === shapeid) {\n\t shapeseq.splice(i + 1, 0, shape.id);\n\t this.shapes[shape.id] = shape;\n\t return;\n\t }\n\t }\n\t },\n\t\n\t removeShapeId: function (shapeid) {\n\t var shapeseq = this.shapeseq,\n\t i;\n\t for (i = shapeseq.length; i--;) {\n\t if (shapeseq[i] === shapeid) {\n\t shapeseq.splice(i, 1);\n\t break;\n\t }\n\t }\n\t delete this.shapes[shapeid];\n\t },\n\t\n\t getShapeAt: function (el, x, y) {\n\t this.targetX = x;\n\t this.targetY = y;\n\t this.render();\n\t return this.currentTargetShapeId;\n\t },\n\t\n\t render: function () {\n\t var shapeseq = this.shapeseq,\n\t shapes = this.shapes,\n\t shapeCount = shapeseq.length,\n\t context = this._getContext(),\n\t shapeid, shape, i;\n\t context.clearRect(0, 0, this.pixelWidth, this.pixelHeight);\n\t for (i = 0; i < shapeCount; i++) {\n\t shapeid = shapeseq[i];\n\t shape = shapes[shapeid];\n\t this['_draw' + shape.type].apply(this, shape.args);\n\t }\n\t if (!this.interact) {\n\t // not interactive so no need to keep the shapes array\n\t this.shapes = {};\n\t this.shapeseq = [];\n\t }\n\t }\n\t\n\t });\n\t\n\t VCanvas_vml = createClass(VCanvas_base, {\n\t init: function (width, height, target) {\n\t var groupel;\n\t VCanvas_vml._super.init.call(this, width, height, target);\n\t if (target[0]) {\n\t target = target[0];\n\t }\n\t $.data(target, '_jqs_vcanvas', this);\n\t this.canvas = document.createElement('span');\n\t $(this.canvas).css({ display: 'inline-block', position: 'relative', overflow: 'hidden', width: width, height: height, margin: '0px', padding: '0px', verticalAlign: 'top'});\n\t this._insert(this.canvas, target);\n\t this._calculatePixelDims(width, height, this.canvas);\n\t this.canvas.width = this.pixelWidth;\n\t this.canvas.height = this.pixelHeight;\n\t groupel = '<v:group coordorigin=\"0 0\" coordsize=\"' + this.pixelWidth + ' ' + this.pixelHeight + '\" style=\"position:absolute;top:0;left:0;width:' + this.pixelWidth + 'px;height:' + this.pixelHeight + 'px;\"></v:group>';\n\t this.canvas.insertAdjacentHTML('beforeEnd', groupel);\n\t this.group = $(this.canvas).children()[0];\n\t this.rendered = false;\n\t this.prerender = '';\n\t },\n\t\n\t _drawShape: function (shapeid, path, lineColor, fillColor, lineWidth) {\n\t var vpath = [],\n\t initial, stroke, fill, closed, vel, plen, i;\n\t for (i = 0, plen = path.length; i < plen; i++) {\n\t vpath[i] = '' + (path[i][0]) + ',' + (path[i][1]);\n\t }\n\t initial = vpath.splice(0, 1);\n\t lineWidth = lineWidth === undefined ? 1 : lineWidth;\n\t stroke = lineColor === undefined ? ' stroked=\"false\" ' : ' strokeWeight=\"' + lineWidth + 'px\" strokeColor=\"' + lineColor + '\" ';\n\t fill = fillColor === undefined ? ' filled=\"false\"' : ' fillColor=\"' + fillColor + '\" filled=\"true\" ';\n\t closed = vpath[0] === vpath[vpath.length - 1] ? 'x ' : '';\n\t vel = '<v:shape coordorigin=\"0 0\" coordsize=\"' + this.pixelWidth + ' ' + this.pixelHeight + '\" id=\"jqsshape' + shapeid + '\" ' +\n\t stroke + fill +\n\t ' style=\"position:absolute;left:0px;top:0px;height:' + this.pixelHeight + 'px;width:' + this.pixelWidth + 'px;padding:0px;margin:0px;\" path=\"m ' + initial + ' l ' + vpath.join(', ') + ' ' + closed + 'e\"></v:shape>';\n\t return vel;\n\t },\n\t\n\t _drawCircle: function (shapeid, x, y, radius, lineColor, fillColor, lineWidth) {\n\t var stroke, fill, vel;\n\t x -= radius;\n\t y -= radius;\n\t stroke = lineColor === undefined ? ' stroked=\"false\" ' : ' strokeWeight=\"' + lineWidth + 'px\" strokeColor=\"' + lineColor + '\" ';\n\t fill = fillColor === undefined ? 
' filled=\"false\"' : ' fillColor=\"' + fillColor + '\" filled=\"true\" ';\n\t vel = '<v:oval id=\"jqsshape' + shapeid + '\" ' +\n\t stroke + fill +\n\t ' style=\"position:absolute;top:' + y + 'px; left:' + x + 'px; width:' + (radius * 2) + 'px; height:' + (radius * 2) + 'px\"></v:oval>';\n\t return vel;\n\t\n\t },\n\t\n\t _drawPieSlice: function (shapeid, x, y, radius, startAngle, endAngle, lineColor, fillColor) {\n\t var vpath, startx, starty, endx, endy, stroke, fill, vel;\n\t if (startAngle === endAngle) {\n\t return ''; // VML seems to have problem when start angle equals end angle.\n\t }\n\t if ((endAngle - startAngle) === (2 * Math.PI)) {\n\t startAngle = 0.0; // VML seems to have a problem when drawing a full circle that doesn't start 0\n\t endAngle = (2 * Math.PI);\n\t }\n\t\n\t startx = x + Math.round(Math.cos(startAngle) * radius);\n\t starty = y + Math.round(Math.sin(startAngle) * radius);\n\t endx = x + Math.round(Math.cos(endAngle) * radius);\n\t endy = y + Math.round(Math.sin(endAngle) * radius);\n\t\n\t if (startx === endx && starty === endy) {\n\t if ((endAngle - startAngle) < Math.PI) {\n\t // Prevent very small slices from being mistaken as a whole pie\n\t return '';\n\t }\n\t // essentially going to be the entire circle, so ignore startAngle\n\t startx = endx = x + radius;\n\t starty = endy = y;\n\t }\n\t\n\t if (startx === endx && starty === endy && (endAngle - startAngle) < Math.PI) {\n\t return '';\n\t }\n\t\n\t vpath = [x - radius, y - radius, x + radius, y + radius, startx, starty, endx, endy];\n\t stroke = lineColor === undefined ? ' stroked=\"false\" ' : ' strokeWeight=\"1px\" strokeColor=\"' + lineColor + '\" ';\n\t fill = fillColor === undefined ? ' filled=\"false\"' : ' fillColor=\"' + fillColor + '\" filled=\"true\" ';\n\t vel = '<v:shape coordorigin=\"0 0\" coordsize=\"' + this.pixelWidth + ' ' + this.pixelHeight + '\" id=\"jqsshape' + shapeid + '\" ' +\n\t stroke + fill +\n\t ' style=\"position:absolute;left:0px;top:0px;height:' + this.pixelHeight + 'px;width:' + this.pixelWidth + 'px;padding:0px;margin:0px;\" path=\"m ' + x + ',' + y + ' wa ' + vpath.join(', ') + ' x e\"></v:shape>';\n\t return vel;\n\t },\n\t\n\t _drawRect: function (shapeid, x, y, width, height, lineColor, fillColor) {\n\t return this._drawShape(shapeid, [[x, y], [x, y + height], [x + width, y + height], [x + width, y], [x, y]], lineColor, fillColor);\n\t },\n\t\n\t reset: function () {\n\t this.group.innerHTML = '';\n\t },\n\t\n\t appendShape: function (shape) {\n\t var vel = this['_draw' + shape.type].apply(this, shape.args);\n\t if (this.rendered) {\n\t this.group.insertAdjacentHTML('beforeEnd', vel);\n\t } else {\n\t this.prerender += vel;\n\t }\n\t this.lastShapeId = shape.id;\n\t return shape.id;\n\t },\n\t\n\t replaceWithShape: function (shapeid, shape) {\n\t var existing = $('#jqsshape' + shapeid),\n\t vel = this['_draw' + shape.type].apply(this, shape.args);\n\t existing[0].outerHTML = vel;\n\t },\n\t\n\t replaceWithShapes: function (shapeids, shapes) {\n\t // replace the first shapeid with all the new shapes then toast the remaining old shapes\n\t var existing = $('#jqsshape' + shapeids[0]),\n\t replace = '',\n\t slen = shapes.length,\n\t i;\n\t for (i = 0; i < slen; i++) {\n\t replace += this['_draw' + shapes[i].type].apply(this, shapes[i].args);\n\t }\n\t existing[0].outerHTML = replace;\n\t for (i = 1; i < shapeids.length; i++) {\n\t $('#jqsshape' + shapeids[i]).remove();\n\t }\n\t },\n\t\n\t insertAfterShape: function (shapeid, shape) {\n\t var existing = $('#jqsshape' + shapeid),\n\t vel = this['_draw' + shape.type].apply(this, shape.args);\n\t existing[0].insertAdjacentHTML('afterEnd', vel);\n\t },\n\t\n\t removeShapeId: function (shapeid) {\n\t var existing = $('#jqsshape' + shapeid);\n\t this.group.removeChild(existing[0]);\n\t },\n\t\n\t getShapeAt: function (el, x, y) {\n\t var shapeid = el.id.substr(8);\n\t return shapeid;\n\t },\n\t\n\t render: function () {\n\t if (!this.rendered) {\n\t // batch the initial render into a single repaint\n\t this.group.innerHTML = this.prerender;\n\t this.rendered = true;\n\t 
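/* Accumulating markup in this.prerender and assigning it to innerHTML
   once is presumably a repaint optimization for old IE: it avoids one
   insertAdjacentHTML call, and one DOM update, per shape on first paint. */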
}\n\t }\n\t });\n\t\n\t}))}(document, Math));\n\n/***/ },\n/* 94 */,\n/* 95 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* WEBPACK VAR INJECTION */(function(jQuery) {/*!\n\t * jQuery Migrate - v1.4.0 - 2016-02-26\n\t * Copyright jQuery Foundation and other contributors\n\t */\n\t(function( jQuery, window, undefined ) {\n\t// See http://bugs.jquery.com/ticket/13335\n\t// \"use strict\";\n\t\n\t\n\tjQuery.migrateVersion = \"1.4.0\";\n\t\n\t\n\tvar warnedAbout = {};\n\t\n\t// List of warnings already given; public read only\n\tjQuery.migrateWarnings = [];\n\t\n\t// Set to true to prevent console output; migrateWarnings still maintained\n\t// jQuery.migrateMute = false;\n\t\n\t// Show a message on the console so devs know we're active\n\tif ( window.console && window.console.log ) {\n\t\twindow.console.log( \"JQMIGRATE: Migrate is installed\" +\n\t\t\t( jQuery.migrateMute ? \"\" : \" with logging active\" ) +\n\t\t\t\", version \" + jQuery.migrateVersion );\n\t}\n\t\n\t// Set to false to disable traces that appear with warnings\n\tif ( jQuery.migrateTrace === undefined ) {\n\t\tjQuery.migrateTrace = true;\n\t}\n\t\n\t// Forget any warnings we've already given; public\n\tjQuery.migrateReset = function() {\n\t\twarnedAbout = {};\n\t\tjQuery.migrateWarnings.length = 0;\n\t};\n\t\n\tfunction migrateWarn( msg ) {\n\t\tvar console = window.console;\n\t\tif ( !warnedAbout[ msg ] ) {\n\t\t\twarnedAbout[ msg ] = true;\n\t\t\tjQuery.migrateWarnings.push( msg );\n\t\t\tif ( console && console.warn && !jQuery.migrateMute ) {\n\t\t\t\tconsole.warn( \"JQMIGRATE: \" + msg );\n\t\t\t\tif ( jQuery.migrateTrace && console.trace ) {\n\t\t\t\t\tconsole.trace();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t\n\tfunction migrateWarnProp( obj, prop, value, msg ) {\n\t\tif ( Object.defineProperty ) {\n\t\t\t// On ES5 browsers (non-oldIE), warn if the code tries to get prop;\n\t\t\t// allow property to be overwritten in case some other plugin wants it\n\t\t\ttry {\n\t\t\t\tObject.defineProperty( obj, prop, {\n\t\t\t\t\tconfigurable: true,\n\t\t\t\t\tenumerable: true,\n\t\t\t\t\tget: function() {\n\t\t\t\t\t\tmigrateWarn( msg );\n\t\t\t\t\t\treturn value;\n\t\t\t\t\t},\n\t\t\t\t\tset: function( newValue ) {\n\t\t\t\t\t\tmigrateWarn( msg );\n\t\t\t\t\t\tvalue = newValue;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t} catch( err ) {\n\t\t\t\t// IE8 is a dope about Object.defineProperty, can't warn there\n\t\t\t}\n\t\t}\n\t\n\t\t// Non-ES5 (or broken) browser; just set the property\n\t\tjQuery._definePropertyBroken = true;\n\t\tobj[ prop ] = value;\n\t}\n\t\n\tif ( document.compatMode === \"BackCompat\" ) {\n\t\t// jQuery has never supported or tested Quirks Mode\n\t\tmigrateWarn( \"jQuery is not compatible with Quirks Mode\" );\n\t}\n\t\n\t\n\tvar attrFn = jQuery( \"<input/>\", { size: 1 } ).attr(\"size\") && jQuery.attrFn,\n\t\toldAttr = jQuery.attr,\n\t\tvalueAttrGet = jQuery.attrHooks.value && jQuery.attrHooks.value.get ||\n\t\t\tfunction() { return null; },\n\t\tvalueAttrSet = jQuery.attrHooks.value && jQuery.attrHooks.value.set ||\n\t\t\tfunction() { return undefined; },\n\t\trnoType = /^(?:input|button)$/i,\n\t\trnoAttrNodeType = /^[238]$/,\n\t\trboolean = /^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,\n\t\truseDefault = /^(?:checked|selected)$/i;\n\t\n\t// jQuery.attrFn\n\tmigrateWarnProp( jQuery, \"attrFn\", attrFn || {}, \"jQuery.attrFn is deprecated\" );\n\t\n\tjQuery.attr = function( elem, name, value, pass ) {\n\t\tvar 
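/* Illustrative (using the public flags defined above): each deprecated
   pattern warns once per message via migrateWarn; output and history
   can be controlled at runtime.
     jQuery.migrateMute = true;  // keep jQuery.migrateWarnings, silence console
     jQuery.migrateReset();      // forget warnings already issued
*/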
lowerName = name.toLowerCase(),\n\t\t\tnType = elem && elem.nodeType;\n\t\n\t\tif ( pass ) {\n\t\t\t// Since pass is used internally, we only warn for new jQuery\n\t\t\t// versions where there isn't a pass arg in the formal params\n\t\t\tif ( oldAttr.length < 4 ) {\n\t\t\t\tmigrateWarn(\"jQuery.fn.attr( props, pass ) is deprecated\");\n\t\t\t}\n\t\t\tif ( elem && !rnoAttrNodeType.test( nType ) &&\n\t\t\t\t(attrFn ? name in attrFn : jQuery.isFunction(jQuery.fn[name])) ) {\n\t\t\t\treturn jQuery( elem )[ name ]( value );\n\t\t\t}\n\t\t}\n\t\n\t\t// Warn if user tries to set `type`, since it breaks on IE 6/7/8; by checking\n\t\t// for disconnected elements we don't warn on $( \"