From 6eae5e4fec9be0435a91921881e0a64fab46dffd Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:12:32 -0500 Subject: [PATCH 01/12] implement batch processing of new versions to archive --- .../dataverse/DatasetVersionServiceBean.java | 26 ++++++++- .../edu/harvard/iq/dataverse/api/Admin.java | 57 +++++++++++++++++++ 2 files changed, 82 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index e4eb6aac88e..ea6a05a2c3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -987,7 +987,7 @@ public List> getBasicDatasetVersionInfo(Dataset dataset) - public HashMap getFileMetadataHistory(DataFile df){ + public HashMap getFileMetadataHistory(DataFile df){ if (df == null){ throw new NullPointerException("DataFile 'df' cannot be null"); @@ -1165,4 +1165,28 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) return null; } + /** + * Execute a query to return DatasetVersion + * + * @param queryString + * @return + */ + public List getUnarchivedDatasetVersions(){ + + String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;"; + + try{ + TypedQuery query = em.createQuery(queryString, DatasetVersion.class); + List dsl = query.getResultList(); + return dsl; + + } catch (javax.persistence.NoResultException e) { + logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString); + return null; + } catch (EJBException e) { + logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); + return null; + } + } // end getUnarchivedDatasetVersions + } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..81fe1ecd2a9 100644 
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1722,6 +1722,63 @@ public void run() { } } + + @GET + @Path("/archiveAllUnarchivedDataVersions") + public Response archiveAllUnarchivedDatasetVersions() { + + try { + AuthenticatedUser au = findAuthenticatedUserOrDie(); + // Note - the user is being set in the session so it becomes part of the + // DataverseRequest and is sent to the back-end command where it is used to get + // the API Token which is then used to retrieve files (e.g. via S3 direct + // downloads) to create the Bag + session.setUser(au); + List dsl = datasetversionService.getUnarchivedDatasetVersions(); + if (dsl != null) { + String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); + + if (cmd != null) { + new Thread(new Runnable() { + public void run() { + int total = dsl.size(); + int successes = 0; + int failures = 0; + for (DatasetVersion dv : dsl) { + try { + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + + dv = commandEngine.submit(cmd); + if (dv.getArchivalCopyLocation() != null) { + successes++; + logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + + dv.getArchivalCopyLocation()); + } else { + failures++; + logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + } + } catch (CommandException ex) { + logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); + } + logger.fine(successes + failures + " of " + total + " archive submissions complete"); + } + logger.info("Archiving 
complete: " + successes + " Successes, " + failures + " Failures. See prior log messages for details."); + } + }).start(); + return ok("Archiving all unarchived published dataset versions using " + cmd.getClass().getCanonicalName() + ". Processing can take significant time for large datasets/ large numbers of dataset versions. View log and/or check archive for results."); + } else { + logger.log(Level.SEVERE, "Could not find Archiver class: " + className); + return error(Status.INTERNAL_SERVER_ERROR, "Could not find Archiver class: " + className); + } + } else { + return error(Status.BAD_REQUEST, "No unarchived published dataset versions found"); + } + } catch (WrappedResponse e1) { + return error(Status.UNAUTHORIZED, "api key required"); + } + } + @DELETE @Path("/clearMetricsCache") public Response clearMetricsCache() { From 8313404e6604daba3ee53d32d9b09e83ebaae9f2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:26:19 -0500 Subject: [PATCH 02/12] add listonly and limit options, count commandEx as failure --- .../edu/harvard/iq/dataverse/api/Admin.java | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 81fe1ecd2a9..3c61d2e8919 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -46,6 +46,7 @@ import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; import javax.ws.rs.DELETE; +import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; @@ -1723,9 +1724,16 @@ public void run() { } + /** + * Iteratively archives all unarchived dataset versions + * @param + * listonly - don't archive, just list unarchived versions + * limit - max number to process + * @return + */ @GET @Path("/archiveAllUnarchivedDataVersions") - public Response 
archiveAllUnarchivedDatasetVersions() { + public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); @@ -1736,6 +1744,16 @@ public Response archiveAllUnarchivedDatasetVersions() { session.setUser(au); List dsl = datasetversionService.getUnarchivedDatasetVersions(); if (dsl != null) { + if (listonly) { + logger.info("Unarchived versions found: "); + int current = 0; + for (DatasetVersion dv : dsl) { + if (limit != null && current > limit) { + break; + } + logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + } + } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); @@ -1746,6 +1764,9 @@ public void run() { int successes = 0; int failures = 0; for (DatasetVersion dv : dsl) { + if (limit != null && (successes + failures) > limit) { + break; + } try { AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); @@ -1759,6 +1780,7 @@ public void run() { logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); } } catch (CommandException ex) { + failures++; logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } logger.fine(successes + failures + " of " + total + " archive submissions complete"); From 70d923ae08b80d6248acc062ec836ed5812fa645 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:36:50 -0500 Subject: [PATCH 03/12] send list in response for listonly --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 3c61d2e8919..4fd3f43b127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1745,14 +1745,17 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool List dsl = datasetversionService.getUnarchivedDatasetVersions(); if (dsl != null) { if (listonly) { + JsonArrayBuilder jab = Json.createArrayBuilder(); logger.info("Unarchived versions found: "); int current = 0; for (DatasetVersion dv : dsl) { if (limit != null && current > limit) { break; } + jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); } + return ok(jab); } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); From 96d3723307c26668e5687f4ba61fb80d0d207a16 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:51:02 -0500 Subject: [PATCH 04/12] fix query --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index ea6a05a2c3c..344f8af3b87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1173,10 +1173,10 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) */ public List getUnarchivedDatasetVersions(){ - String queryString = "select * from datasetversion where releasetime is not null and 
archivalcopylocation is null;"; + String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL"; try{ - TypedQuery query = em.createQuery(queryString, DatasetVersion.class); + TypedQuery query = em.createQuery(queryString, DatasetVersion.class); List dsl = query.getResultList(); return dsl; From cb9f374e6452cffa5069ef941a0a5f65a8248ca7 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:00:54 -0500 Subject: [PATCH 05/12] case sensitive in query --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 344f8af3b87..3f46a25c91e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1173,7 +1173,7 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) */ public List getUnarchivedDatasetVersions(){ - String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL"; + String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"; try{ TypedQuery query = em.createQuery(queryString, DatasetVersion.class); From 76e23960219f7cdf0cde5bede1cf8fda55fddd9e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:24:13 -0500 Subject: [PATCH 06/12] param to only archive latest version --- .../edu/harvard/iq/dataverse/api/Admin.java | 38 +++++++++++-------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 4fd3f43b127..e06289dfac8 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1729,11 +1729,12 @@ public void run() { * @param * listonly - don't archive, just list unarchived versions * limit - max number to process + * latestonly - only archive the latest versions * @return */ @GET @Path("/archiveAllUnarchivedDataVersions") - public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit) { + public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); @@ -1752,8 +1753,11 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool if (limit != null && current > limit) { break; } - jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); - logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { + jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + current++; + } } return ok(jab); } @@ -1770,21 +1774,23 @@ public void run() { if (limit != null && (successes + failures) > limit) { break; } - try { - AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); - - dv = commandEngine.submit(cmd); - if (dv.getArchivalCopyLocation() != null) { - successes++; - logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " - + dv.getArchivalCopyLocation()); - } else { + if (!latestonly || 
dv.equals(dv.getDataset().getLatestVersionForCopy())) { + try { + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + + dv = commandEngine.submit(cmd); + if (dv.getArchivalCopyLocation() != null) { + successes++; + logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + + dv.getArchivalCopyLocation()); + } else { + failures++; + logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + } + } catch (CommandException ex) { failures++; - logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } - } catch (CommandException ex) { - failures++; - logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } logger.fine(successes + failures + " of " + total + " archive submissions complete"); } From 2e8d990ad4b75719c2d8e6b35a0f3d104822f3c3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:41:58 -0500 Subject: [PATCH 07/12] off by one in limit --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index e06289dfac8..9f819ff13a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1750,7 +1750,7 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool logger.info("Unarchived versions found: "); int current = 0; for (DatasetVersion dv : dsl) { - if (limit != null && current > limit) 
{ + if (limit != null && current >= limit) { break; } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { @@ -1771,7 +1771,7 @@ public void run() { int successes = 0; int failures = 0; for (DatasetVersion dv : dsl) { - if (limit != null && (successes + failures) > limit) { + if (limit != null && (successes + failures) >= limit) { break; } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { From b7968333b5950f44bbf086ebc1d020ee4ca4535f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 23 Dec 2020 11:52:43 -0500 Subject: [PATCH 08/12] documentation --- doc/sphinx-guides/source/installation/config.rst | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4a877eabff7..5b9433d7c31 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -866,9 +866,9 @@ For example: ``cp /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json`` -.. _Archiving API Call: +.. _Archiving API Calls: -API Call +API Calls ++++++++ Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing: @@ -881,6 +881,18 @@ where: ``{version}`` is the friendly version number, e.g. "1.2". +A batch API call is also available that will attempt to archive any currently unarchived dataset versions: + +``curl -H "X-Dataverse-key: " http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions`` + +The call supports three optional query parameters that can be used in combination: + +``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any + +``latestonly={true/false}`` default is false. 
Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) + +``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call. + The submitDataVersionToArchive API (and the workflow discussed below) attempt to archive the dataset version via an archive specific method. For Chronopolis, a DuraCloud space named for the dataset (it's DOI with ':' and '.' replaced with '-') is created and two files are uploaded to it: a version-specific datacite.xml metadata file and a BagIt bag containing the data and an OAI-ORE map file. (The datacite.xml file, stored outside the Bag as well as inside is intended to aid in discovery while the ORE map file is 'complete', containing all user-entered metadata and is intended as an archival record.) In the Chronopolis case, since the transfer from the DuraCloud front-end to archival storage in Chronopolis can take significant time, it is currently up to the admin/curator to submit a 'snap-shot' of the space within DuraCloud and to monitor its successful transfer. Once transfer is complete the space should be deleted, at which point the Dataverse API call can be used to submit a Bag for other versions of the same Dataset. (The space is reused, so that archival copies of different Dataset versions correspond to different snapshots of the same DuraCloud space.). 
From 006a4baff870ebd1c11c86caaacaf96511fadd0c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:28:55 -0500 Subject: [PATCH 09/12] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 5b9433d7c31..84ec0699d62 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -869,7 +869,7 @@ For example: .. _Archiving API Calls: API Calls -++++++++ ++++++++++ Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing: From bba8ba0a13703410a9196713c6920150291d4643 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:29:20 -0500 Subject: [PATCH 10/12] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 84ec0699d62..a997f0e353f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -887,7 +887,7 @@ A batch API call is also available that will attempt to archive any currently un The call supports three optional query parameters that can be used in combination: -``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any +``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any. ``latestonly={true/false}`` default is false. 
Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) From 011c97a4b73775cf152e0cf06127d8da9e8d2780 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:29:46 -0500 Subject: [PATCH 11/12] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a997f0e353f..67ee66af763 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -889,7 +889,7 @@ The call supports three optional query parameters that can be used in combinatio ``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any. -``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) +``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions). ``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call. 
From 1a1c28ccb7a6c0427f349cd8569c516bca43bf68 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 8 Jan 2021 13:10:22 -0500 Subject: [PATCH 12/12] updates per review --- .../dataverse/DatasetVersionServiceBean.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 3f46a25c91e..33cc236b902 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -986,8 +986,8 @@ public List> getBasicDatasetVersionInfo(Dataset dataset) } // end getBasicDatasetVersionInfo - - public HashMap getFileMetadataHistory(DataFile df){ + //Not used? + public HashMap getFileMetadataHistory(DataFile df){ if (df == null){ throw new NullPointerException("DataFile 'df' cannot be null"); @@ -1175,18 +1175,18 @@ public List getUnarchivedDatasetVersions(){ String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"; - try{ + try { TypedQuery query = em.createQuery(queryString, DatasetVersion.class); List dsl = query.getResultList(); return dsl; - + } catch (javax.persistence.NoResultException e) { logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString); return null; - } catch (EJBException e) { - logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); - return null; - } + } catch (EJBException e) { + logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); + return null; + } } // end getUnarchivedDatasetVersions - + } // end class