
Commit

Merge pull request #41 from jmchilton/BioComputeDev
Testing and docs update.
HadleyKing authored Sep 9, 2020
2 parents ecf1a5c + de95203 commit f452239
Showing 3 changed files with 32 additions and 8 deletions.
13 changes: 12 additions & 1 deletion lib/galaxy/webapps/galaxy/api/workflows.py
@@ -1238,7 +1238,13 @@ def _generate_invocation_bco(self, trans, invocation_id, **kwd):
def export_invocation_bco(self, trans, invocation_id, **kwd):
'''
GET /api/invocations/{invocation_id}/biocompute
Return a BioCompute Object for the workflow invocation.
The BioCompute Object endpoints are in beta - important details such
as how inputs and outputs are represented, how the workflow is encoded,
how author and version information is encoded, and how URLs are
generated will very likely change in important ways over time.
'''
return self._generate_invocation_bco(trans, invocation_id, **kwd)
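For orientation, here is a minimal client-side sketch of calling the beta export endpoint added above. The Galaxy URL, API key, and invocation id below are placeholders, not values from this commit.

import requests

GALAXY_URL = "http://localhost:8080"   # placeholder Galaxy instance
API_KEY = "your-api-key"               # placeholder API key
invocation_id = "f2db41e1fa331b3e"     # placeholder invocation id

# Fetch the BioCompute Object for a workflow invocation (beta endpoint).
response = requests.get(
    "%s/api/invocations/%s/biocompute" % (GALAXY_URL, invocation_id),
    params={"key": API_KEY},
)
response.raise_for_status()
bco = response.json()
print(bco["provenance_domain"]["name"])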

@@ -1247,8 +1253,13 @@ def download_invocation_bco(self, trans, invocation_id, **kwd):
"""
GET /api/invocations/{invocation_id}/biocompute/download
Returns a selected BioCompute Object.
Returns a selected BioCompute Object as a file for download (HTTP
headers configured with filename and such).
The BioCompute Object endpoints are in beta - important details such
as how inputs and outputs are represented, how the workflow is encoded,
how author and version information is encoded, and how URLs are
generated will very likely change in important ways over time.
"""
ret_dict = self._generate_invocation_bco(trans, invocation_id, **kwd)
trans.response.headers["Content-Disposition"] = 'attachment; filename="bco_%s.json"' % invocation_id
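A similar sketch for the download variant, which returns the same JSON but with a Content-Disposition header so it saves naturally as a file; again, the connection details are placeholders.

import requests

GALAXY_URL = "http://localhost:8080"   # placeholder Galaxy instance
API_KEY = "your-api-key"               # placeholder API key
invocation_id = "f2db41e1fa331b3e"     # placeholder invocation id

response = requests.get(
    "%s/api/invocations/%s/biocompute/download" % (GALAXY_URL, invocation_id),
    params={"key": API_KEY},
)
response.raise_for_status()
# The endpoint sets Content-Disposition: attachment; filename="bco_<invocation_id>.json",
# so a browser would save it as a download; here we just write the body to disk.
with open("bco_%s.json" % invocation_id, "wb") as fh:
    fh.write(response.content)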
12 changes: 5 additions & 7 deletions lib/galaxy_test/api/test_workflows.py
@@ -1195,6 +1195,9 @@ def test_run_subworkflow_simple(self):
assert [step for step in subworkflow_invocation['steps'] if step['workflow_step_label'] == 'inner_input']
assert [step for step in subworkflow_invocation['steps'] if step['workflow_step_label'] == 'random_lines']

bco = self.workflow_populator.get_biocompute_object(run_response.invocation_id)
self.workflow_populator.validate_biocompute_object(bco)

@skip_without_tool("random_lines1")
def test_run_subworkflow_runtime_parameters(self):
with self.dataset_populator.test_history() as history_id:
@@ -1437,14 +1440,9 @@ def test_export_invocation_bco(self):
with self.dataset_populator.test_history() as history_id:
summary = self._run_jobs(WORKFLOW_SIMPLE, test_data={"input1": "hello world"}, history_id=history_id)
invocation_id = summary.invocation_id
bco = self._get("invocations/%s/biocompute" % invocation_id).json()
self._assert_has_keys(bco, "object_id", "spec_version", "etag", "provenance_domain", "usability_domain", "description_domain", "execution_domain", "parametric_domain", "io_domain", "error_domain")
bco = self.workflow_populator.get_biocompute_object(invocation_id)
self.workflow_populator.validate_biocompute_object(bco)
self.assertEqual(bco['provenance_domain']['name'], "Simple Workflow")
self._assert_has_keys(bco['description_domain'], "keywords", "xref", "platform", "pipeline_steps")
self._assert_has_keys(bco['execution_domain'], "script_access_type", "script", "script_driver", "software_prerequisites", "external_data_endpoints", "environment_variables")
for p in bco['parametric_domain']:
self._assert_has_keys(p, "param", "value", "step")
self._assert_has_keys(bco['io_domain'], "input_subdomain", "output_subdomain")

@skip_without_tool("__APPLY_RULES__")
def test_workflow_run_apply_rules(self):
15 changes: 15 additions & 0 deletions lib/galaxy_test/base/populators.py
@@ -813,6 +813,21 @@ def get_invocation(self, invocation_id):
r.raise_for_status()
return r.json()

def get_biocompute_object(self, invocation_id):
bco_response = self._get("invocations/%s/biocompute" % invocation_id)
bco_response.raise_for_status()
return bco_response.json()

def validate_biocompute_object(self, bco, expected_schema_version='https://w3id.org/ieee/ieee-2791-schema/2791object.json'):
# TODO: actually use jsonref and jsonschema to validate this someday
api_asserts.assert_has_keys(bco, "object_id", "spec_version", "etag", "provenance_domain", "usability_domain", "description_domain", "execution_domain", "parametric_domain", "io_domain", "error_domain")
assert bco['spec_version'] == expected_schema_version
api_asserts.assert_has_keys(bco['description_domain'], "keywords", "xref", "platform", "pipeline_steps")
api_asserts.assert_has_keys(bco['execution_domain'], "script_access_type", "script", "script_driver", "software_prerequisites", "external_data_endpoints", "environment_variables")
for p in bco['parametric_domain']:
api_asserts.assert_has_keys(p, "param", "value", "step")
api_asserts.assert_has_keys(bco['io_domain'], "input_subdomain", "output_subdomain")

def invoke_workflow_raw(self, workflow_id, request):
url = "workflows/%s/usage" % (workflow_id)
invocation_response = self._post(url, data=request)
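The validate_biocompute_object helper above only asserts that the expected top-level keys are present; its TODO points at real schema validation. A rough sketch of what that might look like with jsonschema, assuming the published IEEE 2791 schema is reachable over the network (not part of this commit):

import jsonschema
import requests

IEEE_2791_SCHEMA = "https://w3id.org/ieee/ieee-2791-schema/2791object.json"

def validate_bco_against_schema(bco, schema_url=IEEE_2791_SCHEMA):
    # Download the published 2791 schema and validate the whole object against it,
    # rather than only checking that a handful of top-level keys exist.
    schema = requests.get(schema_url).json()
    # jsonschema resolves any remote $refs in the schema with its default resolver.
    jsonschema.validate(instance=bco, schema=schema)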
