diff --git a/lib/galaxy/webapps/galaxy/api/jobs.py b/lib/galaxy/webapps/galaxy/api/jobs.py
index 21714f8dd577..dddecf1e9a2e 100644
--- a/lib/galaxy/webapps/galaxy/api/jobs.py
+++ b/lib/galaxy/webapps/galaxy/api/jobs.py
@@ -9,9 +9,12 @@
 from six import string_types
 from sqlalchemy import or_

-from galaxy import exceptions
-from galaxy import model
-from galaxy import util
+from galaxy import (
+    exceptions,
+    model,
+    util,
+)
+from galaxy.managers import hdas
 from galaxy.managers.jobs import (
     JobManager,
     JobSearch,
@@ -39,6 +42,7 @@ def __init__(self, app):
         super(JobController, self).__init__(app)
         self.job_manager = JobManager(app)
         self.job_search = JobSearch(app)
+        self.hda_manager = hdas.HDAManager(app)

     @expose_api
     def index(self, trans, **kwd):
@@ -374,10 +378,7 @@ def __dictify_association(self, trans, job_dataset_association):

     def __get_job(self, trans, job_id=None, dataset_id=None, **kwd):
         if job_id is not None:
-            try:
-                decoded_job_id = self.decode_id(job_id)
-            except Exception:
-                raise exceptions.MalformedId()
+            decoded_job_id = self.decode_id(job_id)
             return self.job_manager.get_accessible_job(trans, decoded_job_id)
         else:
             hda_ldda = kwd.get("hda_ldda", "hda")
@@ -410,12 +411,12 @@ def search(self, trans, payload, **kwd):
         """
         tool_id = payload.get('tool_id')
         if tool_id is None:
-            raise exceptions.ObjectAttributeMissingException("No tool id")
+            raise exceptions.RequestParameterMissingException("No tool id")
         tool = trans.app.toolbox.get_tool(tool_id)
         if tool is None:
             raise exceptions.ObjectNotFound("Requested tool not found")
         if 'inputs' not in payload:
-            raise exceptions.ObjectAttributeMissingException("No inputs defined")
+            raise exceptions.RequestParameterMissingException("No inputs defined")
         inputs = payload.get('inputs', {})
         # Find files coming in as multipart file data and add to inputs.
         for k, v in payload.items():
@@ -439,7 +440,7 @@ def search(self, trans, payload, **kwd):
         return [self.encode_all_ids(trans, single_job.to_dict('element'), True) for single_job in jobs]

     @expose_api_anonymous
-    def error(self, trans, id, **kwd):
+    def error(self, trans, id, payload, **kwd):
         """
         error( trans, id )
         * POST /api/jobs/{id}/error
@@ -452,16 +453,18 @@
         :returns: dictionary containing information regarding where the error report was sent.
         """
         # Get dataset on which this error was triggered
-        try:
-            decoded_dataset_id = self.decode_id(kwd['dataset_id'])
-        except Exception:
-            raise exceptions.MalformedId()
-        dataset = trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get(decoded_dataset_id)
+        dataset_id = payload.get('dataset_id')
+        if not dataset_id:
+            raise exceptions.RequestParameterMissingException('No dataset_id')
+        decoded_dataset_id = self.decode_id(dataset_id)
+        dataset = self.hda_manager.get_accessible(decoded_dataset_id, trans.user)

         # Get job
         job = self.__get_job(trans, id)
+        if dataset.creating_job.id != job.id:
+            raise exceptions.RequestParameterInvalidException('dataset_id was not created by job_id')
         tool = trans.app.toolbox.get_tool(job.tool_id, tool_version=job.tool_version) or None
-        email = kwd.get('email')
+        email = payload.get('email')
         if not email and not trans.anonymous:
             email = trans.user.email
         messages = trans.app.error_reports.default_error_plugin.submit_report(
@@ -471,7 +474,7 @@
             user_submission=True,
             user=trans.user,
             email=email,
-            message=kwd.get('message')
+            message=payload.get('message')
         )
         return {'messages': messages}
diff --git a/lib/galaxy_test/api/test_jobs.py b/lib/galaxy_test/api/test_jobs.py
index e0e48767447d..9e6c445e496f 100644
--- a/lib/galaxy_test/api/test_jobs.py
+++ b/lib/galaxy_test/api/test_jobs.py
@@ -53,7 +53,7 @@ def test_index_state_filter(self, history_id):

         # Verify number of ok jobs is actually greater.
         count_increased = False
-        for i in range(10):
+        for _ in range(10):
             new_count = len(self.__uploads_with_state("ok"))
             if original_count < new_count:
                 count_increased = True
@@ -228,10 +228,11 @@ def test_report_error(self):
         )
         run_response = self._post("tools", data=payload).json()
         job_id = run_response['jobs'][0]["id"]
+        self.dataset_populator.wait_for_job(job_id)
         dataset_id = run_response['outputs'][0]['id']
         response = self._post('jobs/%s/error' % job_id,
                               data={'dataset_id': dataset_id})
-        assert response.status_code == 200
+        assert response.status_code == 200, response.text

     @skip_without_tool('detect_errors_aggressive')
     def test_report_error_anon(self):
@@ -243,9 +244,9 @@ def test_report_error_anon(self):
         job_id = run_response['jobs'][0]["id"]
         dataset_id = run_response['outputs'][0]['id']
         response = requests.post('%s/jobs/%s/error' % (self.galaxy_interactor.api_url, job_id),
-                                 params={'email': 'someone@domain.com', 'dataset_id': dataset_id},
+                                 data={'email': 'someone@domain.com', 'dataset_id': dataset_id},
                                  cookies=cookies)
-        assert response.status_code == 200
+        assert response.status_code == 200, response.text

     @uses_test_history(require_new=True)
     def test_deleting_output_keep_running_until_all_deleted(self, history_id):
@@ -597,7 +598,7 @@ def _search_payload(self, history_id, tool_id, inputs, state='ok'):
     def _search(self, payload, expected_search_count=1):
         # in case job and history aren't updated at exactly the same
         # time give time to wait
-        for i in range(5):
+        for _ in range(5):
             search_count = self._search_count(payload)
             if search_count == expected_search_count:
                 break
diff --git a/lib/galaxy_test/api/test_search.py b/lib/galaxy_test/api/test_search.py
index 80034e9f8fa8..a6f75fc44623 100644
--- a/lib/galaxy_test/api/test_search.py
+++ b/lib/galaxy_test/api/test_search.py
@@ -10,14 +10,14 @@ def test_search_workflows(self):
         workflow_populator = WorkflowPopulator(self.galaxy_interactor)
         workflow_id = workflow_populator.simple_workflow("test_for_search")
         search_response = self.__search("select * from workflow")
-        assert self.__has_result_with_name(search_response, "test_for_search"), search_response.json()
+        assert self.__has_result_with_name(search_response, "test_for_search"), search_response.text

         # Deleted
         delete_url = self._api_url("workflows/%s" % workflow_id, use_key=True)
         delete(delete_url)
         search_response = self.__search("select * from workflow where deleted = False")
-        assert not self.__has_result_with_name(search_response, "test_for_search"), search_response.json()
+        assert not self.__has_result_with_name(search_response, "test_for_search"), search_response.text

     def __search(self, query):
         data = dict(query=query)
diff --git a/lib/galaxy_test/api/test_tools.py b/lib/galaxy_test/api/test_tools.py
index 06c38cea586e..4bed984c8d24 100644
--- a/lib/galaxy_test/api/test_tools.py
+++ b/lib/galaxy_test/api/test_tools.py
@@ -235,12 +235,12 @@ def _show_valid_tool(self, tool_id, tool_version=None):
     @skip_without_tool("composite_output")
     def test_test_data_filepath_security(self):
         test_data_response = self._get("tools/%s/test_data_path?filename=../CONTRIBUTORS.md" % "composite_output", admin=True)
-        assert test_data_response.status_code == 404, test_data_response.json()
+        assert test_data_response.status_code == 404, test_data_response.text

     @skip_without_tool("composite_output")
     def test_test_data_admin_security(self):
         test_data_response = self._get("tools/%s/test_data_path?filename=../CONTRIBUTORS.md" % "composite_output")
-        assert test_data_response.status_code == 403, test_data_response.json()
+        assert test_data_response.status_code == 403, test_data_response.text

     @skip_without_tool("composite_output")
     def test_test_data_composite_output(self):
@@ -288,7 +288,7 @@ def test_test_data_download(self):
     @skip_without_tool("composite_output")
     def test_test_data_downloads_security(self):
         test_data_response = self._get("tools/%s/test_data_download?filename=../CONTRIBUTORS.md" % "composite_output")
-        assert test_data_response.status_code == 404, test_data_response.json()
+        assert test_data_response.status_code == 404, test_data_response.text

     @skip_without_tool("composite_output")
     def test_test_data_download_composite(self):
@@ -2174,7 +2174,9 @@ def _run_outputs(self, create_response):
     def _run_cat1(self, history_id, inputs, assert_ok=False, **kwargs):
         return self._run('cat1', history_id, inputs, assert_ok=assert_ok, **kwargs)

-    def _run(self, tool_id=None, history_id=None, inputs={}, tool_uuid=None, assert_ok=False, tool_version=None, use_cached_job=False, wait_for_job=False):
+    def _run(self, tool_id=None, history_id=None, inputs=None, tool_uuid=None, assert_ok=False, tool_version=None, use_cached_job=False, wait_for_job=False):
+        if inputs is None:
+            inputs = {}
         if tool_id is None:
             assert tool_uuid is not None
         payload = self.dataset_populator.run_tool_payload(
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index 1c1485845d6b..a04810d59ae1 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -306,7 +306,7 @@ def _create_tool_raw(self, payload):
             create_response = self._post("dynamic_tools", data=payload, admin=True)
         except TypeError:
             create_response = self._post("dynamic_tools", data=payload)
-        assert create_response.status_code == 200, create_response.json()
+        assert create_response.status_code == 200, create_response.text
         return create_response.json()

     def list_dynamic_tools(self):
@@ -352,7 +352,7 @@ def wrap_up():
     def new_history(self, **kwds):
         name = kwds.get("name", "API Test History")
         create_history_response = self._post("histories", data=dict(name=name))
-        assert "id" in create_history_response.json(), create_history_response.json()
+        assert "id" in create_history_response.json(), create_history_response.text
         history_id = create_history_response.json()["id"]
         return history_id

@@ -1461,7 +1461,7 @@ def read_test_data(test_dict):
     return inputs, label_map, has_uploads


-def wait_on_state(state_func, desc="state", skip_states=["running", "queued", "new", "ready"], assert_ok=False, timeout=DEFAULT_TIMEOUT):
+def wait_on_state(state_func, desc="state", skip_states=None, assert_ok=False, timeout=DEFAULT_TIMEOUT):
     def get_state():
         response = state_func()
         assert response.status_code == 200, "Failed to fetch state update while waiting. [%s]" % response.content
@@ -1472,6 +1472,9 @@ def get_state():
         if assert_ok:
             assert state == "ok", "Final state - %s - not okay." % state
         return state
+
+    if skip_states is None:
+        skip_states = ["running", "queued", "new", "ready"]
     try:
         return wait_on(get_state, desc=desc, timeout=timeout)
     except TimeoutAssertionError as e:
@@ -1492,20 +1495,25 @@ def _api_url(self):
     def _get(self, route, data=None):
         if data is None:
             data = {}
-
         return self._gi.make_get_request(self._url(route), data=data)

-    def _post(self, route, data={}):
+    def _post(self, route, data=None):
+        if data is None:
+            data = {}
         data = data.copy()
         data['key'] = self._gi.key
         return requests.post(self._url(route), data=data)

-    def _put(self, route, data={}):
+    def _put(self, route, data=None):
+        if data is None:
+            data = {}
         data = data.copy()
         data['key'] = self._gi.key
         return requests.put(self._url(route), data=data)

-    def _delete(self, route, data={}):
+    def _delete(self, route, data=None):
+        if data is None:
+            data = {}
         data = data.copy()
         data['key'] = self._gi.key
         return requests.delete(self._url(route), data=data)
diff --git a/lib/galaxy_test/selenium/test_history_sharing.py b/lib/galaxy_test/selenium/test_history_sharing.py
index db09e53d006b..bcd784b746eb 100644
--- a/lib/galaxy_test/selenium/test_history_sharing.py
+++ b/lib/galaxy_test/selenium/test_history_sharing.py
@@ -14,14 +14,14 @@ def test_sharing_valid(self):
         user1_email, user2_email, history_id = self.setup_two_users_with_one_shared_history()
         self.submit_login(user2_email, retries=VALID_LOGIN_RETRIES)
         response = self.api_get("histories/%s" % history_id, raw=True)
-        assert response.status_code == 200, response.json()
+        assert response.status_code == 200, response.text

     @selenium_test
     def test_sharing_valid_by_id(self):
         user1_email, user2_email, history_id = self.setup_two_users_with_one_shared_history(share_by_id=True)
         self.submit_login(user2_email, retries=VALID_LOGIN_RETRIES)
         response = self.api_get("histories/%s" % history_id, raw=True)
-        assert response.status_code == 200, response.json()
+        assert response.status_code == 200, response.text

     @selenium_test
     def test_unsharing(self):