diff --git a/client/src/components/FilesDialog/FilesInput.vue b/client/src/components/FilesDialog/FilesInput.vue
new file mode 100644
index 000000000000..a56a184af73f
--- /dev/null
+++ b/client/src/components/FilesDialog/FilesInput.vue
@@ -0,0 +1,54 @@
+
+
+
diff --git a/client/src/components/History/HistoryDetails.vue b/client/src/components/History/HistoryDetails.vue
index d85c0429fbd8..148cf9047e07 100644
--- a/client/src/components/History/HistoryDetails.vue
+++ b/client/src/components/History/HistoryDetails.vue
@@ -96,7 +96,7 @@
                 key="export-history-to-file"
                 title="Export History to File"
                 icon="fas fa-file-archive"
-                @click="iframeRedirect('/history/export_archive?preview=True')"
+                @click="backboneRoute(`/histories/${history.id}/export`)"
             />
diff --git a/client/src/components/HistoryExport/Index.test.js b/client/src/components/HistoryExport/Index.test.js
new file mode 100644
index 000000000000..6d133bbbdcb2
--- /dev/null
+++ b/client/src/components/HistoryExport/Index.test.js
@@ -0,0 +1,18 @@
+import { shallowMount } from "@vue/test-utils";
+import Index from "./Index.vue";
+import { getLocalVue } from "jest/helpers";
+
+const localVue = getLocalVue();
+
+describe("Index.vue", () => {
+    it("should render tabs", () => {
+        // just make sure the component renders to catch obvious big errors
+        const wrapper = shallowMount(Index, {
+            propsData: {
+                historyId: "test_id",
+            },
+            localVue,
+        });
+        expect(wrapper.exists("b-tabs-stub")).toBeTruthy();
+    });
+});
diff --git a/client/src/components/HistoryExport/Index.vue b/client/src/components/HistoryExport/Index.vue
new file mode 100644
index 000000000000..3c9f2bb1e503
--- /dev/null
+++ b/client/src/components/HistoryExport/Index.vue
@@ -0,0 +1,39 @@
+
+
+
diff --git a/client/src/components/HistoryExport/ToLink.test.js b/client/src/components/HistoryExport/ToLink.test.js
new file mode 100644
index 000000000000..9d3d1a1a5925
--- /dev/null
+++ b/client/src/components/HistoryExport/ToLink.test.js
@@ -0,0 +1,51 @@
+import { shallowMount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import ToLink from "./ToLink.vue";
+import flushPromises from "flush-promises";
+import MockAdapter from "axios-mock-adapter";
+import axios from "axios";
+
+const localVue = getLocalVue();
+const TEST_HISTORY_ID = "hist1235";
+const TEST_EXPORTS_URL = `/api/histories/${TEST_HISTORY_ID}/exports`;
+
+describe("ToLink.vue", () => {
+    let axiosMock;
+    let wrapper;
+
+    async function mountWithInitialExports(exports) {
+        axiosMock.onGet(TEST_EXPORTS_URL).reply(200, exports);
+        wrapper = shallowMount(ToLink, {
+            propsData: {
+                historyId: TEST_HISTORY_ID,
+            },
+            localVue,
+        });
+        await wrapper.vm.$nextTick();
+        expect(wrapper.find("loading-span-stub").exists()).toBeTruthy();
+        await flushPromises();
+    }
+
+    beforeEach(async () => {
+        axiosMock = new MockAdapter(axios);
+    });
+
+    it("should display a link if no exports ever generated", async () => {
+        await mountWithInitialExports([]);
+        expect(wrapper.find(".export-link")).toBeTruthy();
+        expect(wrapper.find("loading-span-stub").exists()).toBeFalsy(); // loading span gone
+    });
+
+    it("should start polling if latest export is preparing", async () => {
+        await mountWithInitialExports([
+            {
+                preparing: true,
+            },
+        ]);
+        expect(wrapper.find("loading-span-stub").attributes("message")).toContain("preparing");
+    });
+
+    afterEach(() => {
+        axiosMock.restore();
+    });
+});
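The ToLink.vue SFC body is not visible in this diff excerpt, but the tests above pin down its behavior: fetch /api/histories/{id}/exports and keep checking while the newest export is still preparing. A minimal sketch of that polling loop, with assumed names (POLL_DELAY, onUpdate) and assuming the newest export is listed first:

```js
import axios from "axios";

const POLL_DELAY = 3000; // assumed delay, not taken from the component

export async function pollHistoryExports(historyId, onUpdate) {
    const { data: exports } = await axios.get(`/api/histories/${historyId}/exports`);
    onUpdate(exports);
    const latest = exports[0]; // assumes the newest export is listed first
    if (latest && latest.preparing) {
        // the latest archive is still being generated, check again shortly
        setTimeout(() => pollHistoryExports(historyId, onUpdate), POLL_DELAY);
    }
    return exports;
}
```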
diff --git a/client/src/components/HistoryExport/ToLink.vue b/client/src/components/HistoryExport/ToLink.vue
new file mode 100644
index 000000000000..de9857db0d67
--- /dev/null
+++ b/client/src/components/HistoryExport/ToLink.vue
@@ -0,0 +1,148 @@
+
+
+
diff --git a/client/src/components/HistoryExport/ToRemoteFile.test.js b/client/src/components/HistoryExport/ToRemoteFile.test.js
new file mode 100644
index 000000000000..275d2076bab6
--- /dev/null
+++ b/client/src/components/HistoryExport/ToRemoteFile.test.js
@@ -0,0 +1,72 @@
+import { shallowMount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import ToRemoteFile from "./ToRemoteFile.vue";
+import MockAdapter from "axios-mock-adapter";
+import axios from "axios";
+import flushPromises from "flush-promises";
+import { waitOnJob } from "components/JobStates/wait";
+
+const localVue = getLocalVue();
+const TEST_HISTORY_ID = "hist1235";
+const TEST_JOB_ID = "job123789";
+const TEST_EXPORTS_URL = `/api/histories/${TEST_HISTORY_ID}/exports`;
+
+jest.mock("components/JobStates/wait");
+
+describe("ToRemoteFile.vue", () => {
+    let axiosMock;
+    let wrapper;
+
+    beforeEach(async () => {
+        axiosMock = new MockAdapter(axios);
+        wrapper = shallowMount(ToRemoteFile, {
+            propsData: {
+                historyId: TEST_HISTORY_ID,
+            },
+            localVue,
+        });
+    });
+
+    it("should render a form with export disabled because inputs empty", async () => {
+        expect(wrapper.find(".export-button").exists()).toBeTruthy();
+        expect(wrapper.find(".export-button").attributes("disabled")).toBeTruthy();
+        expect(wrapper.vm.canExport).toBeFalsy();
+    });
+
+    it("should allow export when name and directory available", async () => {
+        await wrapper.setData({
+            name: "export.tar.gz",
+            directory: "gxfiles://",
+        });
+        expect(wrapper.vm.directory).toEqual("gxfiles://");
+        expect(wrapper.vm.name).toEqual("export.tar.gz");
+        expect(wrapper.vm.canExport).toBeTruthy();
+    });
+
+    it("should issue export PUT request on export", async () => {
+        await wrapper.setData({
+            name: "export.tar.gz",
+            directory: "gxfiles://",
+        });
+        let request;
+        axiosMock.onPut(TEST_EXPORTS_URL).reply((request_) => {
+            request = request_;
+            return [200, { job_id: TEST_JOB_ID }];
+        });
+        waitOnJob.mockReturnValue(
+            new Promise((then) => {
+                then({ state: "ok" });
+            })
+        );
+        wrapper.vm.doExport();
+        await flushPromises();
+        const putData = JSON.parse(request.data);
+        expect(putData.directory_uri).toEqual("gxfiles://");
+        expect(putData.file_name).toEqual("export.tar.gz");
+        expect(wrapper.find("b-alert-stub").attributes("variant")).toEqual("success");
+    });
+
+    afterEach(() => {
+        axiosMock.restore();
+    });
+});
diff --git a/client/src/components/HistoryExport/ToRemoteFile.vue b/client/src/components/HistoryExport/ToRemoteFile.vue
new file mode 100644
index 000000000000..566f9099afef
--- /dev/null
+++ b/client/src/components/HistoryExport/ToRemoteFile.vue
@@ -0,0 +1,119 @@
+
+
+
diff --git a/client/src/components/HistoryExport/index.js b/client/src/components/HistoryExport/index.js
new file mode 100644
index 000000000000..d1e4f95783fe
--- /dev/null
+++ b/client/src/components/HistoryExport/index.js
@@ -0,0 +1 @@
+export { default as HistoryExport } from "./Index.vue";
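ToRemoteFile.vue is likewise elided here; the tests show it PUTs directory_uri and file_name to /api/histories/{id}/exports and then waits on the returned job. A rough sketch of that request flow, assuming waitOnJob accepts the encoded job id:

```js
import axios from "axios";
import { waitOnJob } from "components/JobStates/wait";

export async function exportToRemoteFile(historyId, directoryUri, fileName) {
    // PUT rather than POST: repeated submissions should reuse one export instead of creating duplicates
    const { data } = await axios.put(`/api/histories/${historyId}/exports`, {
        directory_uri: directoryUri,
        file_name: fileName,
    });
    // the response carries the export job's encoded id; wait for it to reach a terminal state
    return waitOnJob(data.job_id);
}
```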
diff --git a/client/src/components/HistoryImport.test.js b/client/src/components/HistoryImport.test.js
new file mode 100644
index 000000000000..8c3c4bf20080
--- /dev/null
+++ b/client/src/components/HistoryImport.test.js
@@ -0,0 +1,79 @@
+import { shallowMount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import HistoryImport from "./HistoryImport.vue";
+import MockAdapter from "axios-mock-adapter";
+import axios from "axios";
+import flushPromises from "flush-promises";
+import { waitOnJob } from "components/JobStates/wait";
+
+const localVue = getLocalVue();
+const TEST_JOB_ID = "job123789";
+const TEST_HISTORY_URI = "/api/histories";
+const TEST_SOURCE_URL = "http://galaxy.example/import";
+
+jest.mock("components/JobStates/wait");
+
+describe("HistoryImport.vue", () => {
+    let axiosMock;
+    let wrapper;
+
+    beforeEach(async () => {
+        axiosMock = new MockAdapter(axios);
+        wrapper = shallowMount(HistoryImport, {
+            propsData: {},
+            localVue,
+        });
+    });
+
+    it("should render a form with submit disabled because inputs empty", async () => {
+        expect(wrapper.find(".import-button").exists()).toBeTruthy();
+        expect(wrapper.find(".import-button").attributes("disabled")).toBeTruthy();
+        expect(wrapper.vm.importReady).toBeFalsy();
+    });
+
+    it("should allow import when URL available", async () => {
+        await wrapper.setData({
+            sourceURL: TEST_SOURCE_URL,
+        });
+        expect(wrapper.vm.importReady).toBeTruthy();
+    });
+
+    it("should require a URI if that is the import type", async () => {
+        await wrapper.setData({
+            sourceURL: TEST_SOURCE_URL,
+            importType: "sourceRemoteFilesUri",
+        });
+        expect(wrapper.vm.importReady).toBeFalsy();
+    });
+
+    it("should post to create a new history and wait on job when submitted", async () => {
+        await wrapper.setData({
+            sourceURL: TEST_SOURCE_URL,
+        });
+        let formData;
+        axiosMock.onPost(TEST_HISTORY_URI).reply((request) => {
+            formData = request.data;
+            return [200, { job_id: TEST_JOB_ID }];
+        });
+        let then;
+        waitOnJob.mockReturnValue(
+            new Promise((then_) => {
+                then = then_;
+            })
+        );
+        wrapper.vm.submit();
+        await flushPromises();
+        expect(formData.get("archive_source")).toBe(TEST_SOURCE_URL);
+        expect(wrapper.vm.waitingOnJob).toBeTruthy();
+
+        // complete job and make sure waitingOnJob is false and complete is true
+        then({ state: "ok" });
+        await flushPromises();
+        expect(wrapper.vm.waitingOnJob).toBeFalsy();
+        expect(wrapper.vm.complete).toBeTruthy();
+    });
+
+    afterEach(() => {
+        axiosMock.restore();
+    });
+});
diff --git a/client/src/components/HistoryImport.vue b/client/src/components/HistoryImport.vue
index 5a3314bd1124..9b6a6d2069bd 100644
--- a/client/src/components/HistoryImport.vue
+++ b/client/src/components/HistoryImport.vue
@@ -1,57 +1,158 @@
+
+
diff --git a/client/src/components/JobInformation/JobError.vue b/client/src/components/JobInformation/JobError.vue
new file mode 100644
index 000000000000..5a91540213b0
--- /dev/null
+++ b/client/src/components/JobInformation/JobError.vue
@@ -0,0 +1,70 @@
+
+
+
+
+
diff --git a/client/src/entry/analysis/AnalysisRouter.js b/client/src/entry/analysis/AnalysisRouter.js
index 3cd82ff0e7d3..57b307aecb4c 100644
--- a/client/src/entry/analysis/AnalysisRouter.js
+++ b/client/src/entry/analysis/AnalysisRouter.js
@@ -32,6 +32,7 @@ import InteractiveTools from "components/InteractiveTools/InteractiveTools.vue";
 import LibraryFolder from "components/LibraryFolder/LibraryFolder.vue";
 import WorkflowList from "components/Workflow/WorkflowList.vue";
 import HistoryImport from "components/HistoryImport.vue";
+import { HistoryExport } from "components/HistoryExport/index";
 import HistoryView from "components/HistoryView.vue";
 import WorkflowInvocationReport from "components/Workflow/InvocationReport.vue";
 import WorkflowRun from "components/Workflow/Run/WorkflowRun.vue";
@@ -87,6 +88,7 @@ export const getAnalysisRouter = (Galaxy) =>
         "(/)histories(/)rename(/)": "show_histories_rename",
         "(/)histories(/)sharing(/)": "show_histories_sharing",
         "(/)histories(/)import(/)": "show_histories_import",
+        "(/)histories(/)(:history_id)(/)export(/)": "show_history_export",
         "(/)histories(/)permissions(/)": "show_histories_permissions",
         "(/)histories/view": "show_history_view",
         "(/)histories/show_structure": "show_history_structure",
@@ -251,6 +253,12 @@
             this._display_vue_helper(HistoryImport);
         },
 
+        show_history_export: function (history_id) {
+            this._display_vue_helper(HistoryExport, {
+                historyId: history_id,
+            });
+        },
+
         show_tools_view: function () {
             this.page.toolPanel?.component.hide();
             this.page.panels.right.hide();
diff --git a/client/src/mvc/history/job-states-model.js b/client/src/mvc/history/job-states-model.js
index 06b745f51a33..599b45eac1d8 100644
--- a/client/src/mvc/history/job-states-model.js
+++ b/client/src/mvc/history/job-states-model.js
@@ -1,6 +1,7 @@
 import Backbone from "backbone";
 import { getAppRoot } from "onload/loadConfig";
 import AJAX_QUEUE from "utils/ajax-queue";
+import axios from "axios";
 
 /** ms between fetches when checking running jobs/datasets for updates */
 var UPDATE_DELAY = 2000;
@@ -208,4 +209,10 @@ var JobStatesSummaryCollection = Backbone.Collection.extend({
     },
 });
 
-export default { JobStatesSummary, JobStatesSummaryCollection, FETCH_STATE_ON_ADD, NON_TERMINAL_STATES, ERROR_STATES };
+export default {
+    JobStatesSummary,
+    JobStatesSummaryCollection,
+    FETCH_STATE_ON_ADD,
+    NON_TERMINAL_STATES,
+    ERROR_STATES,
+};
diff --git a/client/src/mvc/history/options-menu.js b/client/src/mvc/history/options-menu.js
index 3192641c2f37..3556ca5b8dba 100644
--- a/client/src/mvc/history/options-menu.js
+++ b/client/src/mvc/history/options-menu.js
@@ -148,8 +148,13 @@ var menu = [
     },
     {
        html: _l("Export History to File"),
-        href: "history/export_archive?preview=True",
        anon: true,
+        func: function () {
+            const Galaxy = getGalaxyInstance();
+            if (Galaxy && Galaxy.currHistoryPanel && Galaxy.router) {
+                Galaxy.router.push(`/histories/${Galaxy.currHistoryPanel.model.id}/export`);
+            }
+        },
    },
    {
        html: _l("Beta Features"),
diff --git a/client/src/utils/simple-error.js b/client/src/utils/simple-error.js
index aa5714b8a99a..75b7ce3bc18a 100644
--- a/client/src/utils/simple-error.js
+++ b/client/src/utils/simple-error.js
@@ -1,5 +1,5 @@
-export function errorMessageAsString(e) {
-    let message = "Request failed.";
+export function errorMessageAsString(e, defaultMessage = "Request failed.") {
+    let message = defaultMessage;
 if (e && e.response && e.response.data && e.response.data.err_msg) {
        message = e.response.data.err_msg;
    } else if (e && e.response) {
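Usage sketch for the widened errorMessageAsString signature: callers can now pass a context-specific default instead of the generic "Request failed." (the surrounding fetch helper is illustrative):

```js
import axios from "axios";
import { errorMessageAsString } from "utils/simple-error";

async function fetchExports(historyId) {
    try {
        const { data } = await axios.get(`/api/histories/${historyId}/exports`);
        return data;
    } catch (e) {
        // second argument replaces the generic "Request failed." default
        throw new Error(errorMessageAsString(e, "Failed to fetch history exports."));
    }
}
```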
diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py
index dc27068c6959..07c2976856af 100644
--- a/lib/galaxy/managers/histories.py
+++ b/lib/galaxy/managers/histories.py
@@ -169,6 +169,50 @@ def non_ready_jobs(self, history):
         return jobs
 
 
+class HistoryExportView:
+
+    def __init__(self, app):
+        self.app = app
+
+    def get_exports(self, trans, history_id):
+        history = self._history(trans, history_id)
+        matching_exports = history.exports
+        return [self.serialize(trans, history_id, e) for e in matching_exports]
+
+    def serialize(self, trans, history_id, jeha):
+        rval = jeha.to_dict()
+        api_url = self.app.url_for("history_archive_download", id=history_id, jeha_id=trans.security.encode_id(jeha.id))
+        # this URL is less likely to be blocked by a proxy and require an API key, so export
+        # the older-style controller version for use within the GUI and such.
+        external_url = self.app.url_for(controller='history', action="export_archive", id=history_id, qualified=True)
+        rval["download_url"] = api_url
+        rval["external_download_url"] = external_url
+        rval = trans.security.encode_all_ids(rval)
+        return rval
+
+    def get_ready_jeha(self, trans, history_id, jeha_id="latest"):
+        history = self._history(trans, history_id)
+        matching_exports = history.exports
+        if jeha_id != "latest":
+            decoded_jeha_id = trans.security.decode_id(jeha_id)
+            matching_exports = [e for e in matching_exports if e.id == decoded_jeha_id]
+        if len(matching_exports) == 0:
+            raise glx_exceptions.ObjectNotFound("Failed to find target history export")
+
+        jeha = matching_exports[0]
+        if not jeha.ready:
+            raise glx_exceptions.MessageException("Export not available or not yet ready.")
+
+        return jeha
+
+    def _history(self, trans, history_id):
+        if history_id is not None:
+            history = self.app.history_manager.get_accessible(trans.security.decode_id(history_id), trans.user, current_history=trans.history)
+        else:
+            history = trans.history
+        return history
+
+
 class HistorySerializer(sharable.SharableModelSerializer, deletable.PurgableSerializerMixin):
     """
     Interface/service object for serializing histories into dictionaries.
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index 117182b2d7e7..5ede5cc45038 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1667,6 +1667,15 @@ def create_for_history(history, job, sa_session, object_store, compressed):
         jeha.history_attrs_filename = history_attrs_filename
         return jeha
 
+    def to_dict(self):
+        return {
+            'id': self.id,
+            'job_id': self.job.id,
+            'ready': self.ready,
+            'preparing': self.preparing,
+            'up_to_date': self.up_to_date,
+        }
+
 
 class JobImportHistoryArchive(RepresentById):
     def __init__(self, job=None, history=None, archive_dir=None):
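Combining JobExportHistoryArchive.to_dict() with HistoryExportView.serialize(), a record returned by GET /api/histories/{id}/exports should look roughly like the object below; the ids are placeholders and the exact URL shapes depend on url_for and the deployment:

```js
// Illustrative only: field names follow to_dict()/serialize(), values are placeholders.
const exampleExport = {
    id: "<encoded jeha id>",
    job_id: "<encoded job id>",
    ready: true,
    preparing: false,
    up_to_date: true,
    // API route for scripted downloads (may require an API key)
    download_url: "/api/histories/<encoded history id>/exports/<encoded jeha id>",
    // older-style controller URL, friendlier for browsers and proxies
    external_download_url: "https://galaxy.example/history/export_archive?id=<encoded history id>",
};
```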
diff --git a/lib/galaxy/webapps/base/controller.py b/lib/galaxy/webapps/base/controller.py
index 4fc92b96a093..07d49494552d 100644
--- a/lib/galaxy/webapps/base/controller.py
+++ b/lib/galaxy/webapps/base/controller.py
@@ -445,7 +445,8 @@ def queue_history_import(self, trans, archive_type, archive_source):
         # Run job to do import.
         history_imp_tool = trans.app.toolbox.get_tool('__IMPORT_HISTORY__')
         incoming = {'__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type}
-        history_imp_tool.execute(trans, incoming=incoming)
+        job, _ = history_imp_tool.execute(trans, incoming=incoming)
+        return job
 
 
 class UsesLibraryMixin:
diff --git a/lib/galaxy/webapps/galaxy/api/histories.py b/lib/galaxy/webapps/galaxy/api/histories.py
index 274a8d48f984..6744440b658c 100644
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -32,7 +32,6 @@
     expose_api_anonymous,
     expose_api_anonymous_and_sessionless,
     expose_api_raw,
-    url_for
 )
 from galaxy.webapps.base.controller import (
     BaseAPIController,
@@ -52,6 +51,7 @@ def __init__(self, app):
         self.user_manager = users.UserManager(app)
         self.workflow_manager = workflows.WorkflowsManager(app)
         self.manager = histories.HistoryManager(app)
+        self.history_export_view = histories.HistoryExportView(app)
         self.serializer = histories.HistorySerializer(app)
         self.deserializer = histories.HistoryDeserializer(app)
         self.filters = histories.HistoryFilters(app)
@@ -333,8 +333,10 @@ def create(self, trans, payload, **kwd):
                 archive_type = "file"
             else:
                 raise exceptions.MessageException("Please provide a url or file.")
-            self.queue_history_import(trans, archive_type=archive_type, archive_source=archive_source)
-            return {"message": "Importing history from source '%s'. This history will be visible when the import is complete." % archive_source}
+            job = self.queue_history_import(trans, archive_type=archive_type, archive_source=archive_source)
+            job_dict = job.to_dict()
+            job_dict["message"] = "Importing history from source '%s'. This history will be visible when the import is complete." % archive_source
+            return trans.security.encode_all_ids(job_dict)
 
         new_history = None
         # if a history id was passed, copy that history
@@ -451,7 +453,16 @@ def update(self, trans, id, payload, **kwd):
             user=trans.user, trans=trans, **self._parse_serialization_params(kwd, 'detailed'))
 
     @expose_api
-    def archive_export(self, trans, id, **kwds):
+    def index_exports(self, trans, id):
+        """
+        index_exports(self, trans, id)
+        * GET /api/histories/{id}/exports:
+            Get history exports.
+        """
+        return self.history_export_view.get_exports(trans, id)
+
+    @expose_api
+    def archive_export(self, trans, id, payload=None, **kwds):
         """
         export_archive(self, trans, id, payload)
         * PUT /api/histories/{id}/exports:
@@ -464,6 +475,7 @@ def archive_export(self, trans, id, **kwds):
         :rtype:     dict
         :returns:   object containing url to fetch export from.
         """
+        kwds.update(payload or {})
         # PUT instead of POST because multiple requests should just result
         # in one object being created.
         history = self.manager.get_accessible(self.decode_id(id), trans.user, current_history=trans.history)
@@ -496,9 +508,9 @@ def archive_export(self, trans, id, **kwds):
             # written.
             job_id = trans.security.encode_id(job.id)
             return dict(job_id=job_id)
+
         if up_to_date and jeha.ready:
-            jeha_id = trans.security.encode_id(jeha.id)
-            return dict(download_url=url_for("history_archive_download", id=id, jeha_id=jeha_id))
+            return self.history_export_view.serialize(trans, id, jeha)
         else:
             # Valid request, just resource is not ready yet.
             trans.response.status = "202 Accepted"
@@ -515,19 +527,7 @@ def archive_download(self, trans, id, jeha_id, **kwds):
         code (instead of 202) with a JSON dictionary containing a
         `download_url`.
         """
-        # Seems silly to put jeha_id in here, but want GET to be immuatable?
-        # and this is being accomplished this way.
-        history = self.manager.get_accessible(self.decode_id(id), trans.user, current_history=trans.history)
-        matching_exports = [e for e in history.exports if trans.security.encode_id(e.id) == jeha_id]
-        if not matching_exports:
-            raise exceptions.ObjectNotFound()
-
-        jeha = matching_exports[0]
-        if not jeha.ready:
-            # User should not have been given this URL, PUT export should have
-            # return a 202.
-            raise exceptions.MessageException("Export not available or not yet ready.")
-
+        jeha = self.history_export_view.get_ready_jeha(trans, id, jeha_id)
         return self.serve_ready_history_export(trans, jeha)
 
     @expose_api
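With create() now returning the import job and queue_history_import() returning the job object, a client can track the import instead of just showing a message, which is essentially what HistoryImport.test.js exercises above. A hedged sketch (the FormData construction and helper name are assumptions):

```js
import axios from "axios";
import { waitOnJob } from "components/JobStates/wait";

export async function importHistoryFromUrl(sourceURL) {
    const formData = new FormData();
    formData.append("archive_source", sourceURL); // an uploaded archive_file works as well
    const { data } = await axios.post("/api/histories", formData);
    // create() now returns the import job (encoded), so the caller can wait on it
    return waitOnJob(data.job_id);
}
```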
diff --git a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py
index f94878736e61..225d37da7d8c 100644
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -146,6 +146,7 @@ def app_factory(global_conf, load_app_kwds=None, **kwargs):
     webapp.add_client_route('/histories/citations')
     webapp.add_client_route('/histories/list')
     webapp.add_client_route('/histories/import')
+    webapp.add_client_route('/histories/{history_id}/export')
     webapp.add_client_route('/histories/list_published')
     webapp.add_client_route('/histories/list_shared')
     webapp.add_client_route('/histories/rename')
@@ -496,6 +497,9 @@
                          controller='page_revisions',
                          parent_resources=dict(member_name='page', collection_name='pages'))
 
+    webapp.mapper.connect("history_exports",
+                          "/api/histories/{id}/exports", controller="histories",
+                          action="index_exports", conditions=dict(method=["GET"]))
     webapp.mapper.connect("history_archive_export",
                           "/api/histories/{id}/exports", controller="histories",
                           action="archive_export", conditions=dict(method=["PUT"]))
diff --git a/lib/galaxy/webapps/galaxy/controllers/history.py b/lib/galaxy/webapps/galaxy/controllers/history.py
index 34225fd0be75..3d83ea8dfbd0 100644
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -240,6 +240,7 @@ class HistoryController(BaseUIController, SharableMixin, UsesAnnotations, UsesIt
     def __init__(self, app):
         super().__init__(app)
         self.history_manager = managers.histories.HistoryManager(app)
+        self.history_export_view = managers.histories.HistoryExportView(app)
         self.history_serializer = managers.histories.HistorySerializer(self.app)
 
     @web.expose
@@ -1097,39 +1098,13 @@ def rate_async(self, trans, id, rating):
 
     # TODO: used in display_base.mako
     @web.expose
-    def export_archive(self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False, preview=False):
+    def export_archive(self, trans, id=None, jeha_id="latest"):
         """ Export a history to an archive. """
         #
         # Get history to export.
         #
-        if id:
-            history = self.history_manager.get_accessible(self.decode_id(id), trans.user, current_history=trans.history)
-        else:
-            # Use current history.
-            history = trans.history
-            id = trans.security.encode_id(history.id)
-        if not history:
-            return trans.show_error_message("This history does not exist or you cannot export this history.")
-        # If history has already been exported and it has not changed since export, stream it.
-        jeha = history.latest_export
-        if jeha and jeha.up_to_date:
-            if jeha.ready:
-                if preview:
-                    url = url_for(controller='history', action="export_archive", id=id, qualified=True)
-                    return trans.show_message("History Ready: '%(n)s'. Use this link to download "
-                                              "the archive or import it to another Galaxy server: "
-                                              "%(u)s" % ({'n': history.name, 'u': url}))
-                else:
-                    return self.serve_ready_history_export(trans, jeha)
-            elif jeha.preparing:
-                return trans.show_message("Still exporting history %(n)s; please check back soon. Link: %(s)s"
-                                          % ({'n': history.name, 's': url_for(controller='history', action="export_archive", id=id, qualified=True)}))
-        self.queue_history_export(trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted)
-        url = url_for(controller='history', action="export_archive", id=id, qualified=True)
-        return trans.show_message("Exporting History '%(n)s'. You will need to make this history 'accessible' in order to import this to another galaxy sever. "
-                                  "Use this link to download the archive or import it to another Galaxy server: "
-                                  "%(u)s" % ({'share': url_for('/histories/sharing', id=id), 'n': history.name, 'u': url}))
-    # TODO: used in this file and index.mako
+        jeha = self.history_export_view.get_ready_jeha(trans, id, jeha_id)
+        return self.serve_ready_history_export(trans, jeha)
 
     @web.expose
     @web.json