From 3dd63554f01a07f85f2690a27a7be8ace518a86d Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 21 Jun 2024 10:29:35 +0100 Subject: [PATCH 01/21] Stash: including input levels to addDataverse endpoint WIP --- .../harvard/iq/dataverse/api/Dataverses.java | 31 +++++++++---------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 02b60fdb32a..f0c1d04502e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -123,35 +123,36 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { @AuthRequired @Path("{identifier}") public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { - - Dataverse d; - JsonObject dvJson; + Dataverse newDataverse; + JsonObject newDataverseJson; try { - dvJson = JsonUtil.getJsonObject(body); - d = jsonParser().parseDataverse(dvJson); + newDataverseJson = JsonUtil.getJsonObject(body); + newDataverse = jsonParser().parseDataverse(newDataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); - return error(Response.Status.BAD_REQUEST, - "Error parsing the POSTed json into a dataverse: " + ex.getMessage()); + return error(Response.Status.BAD_REQUEST, "Error parsing the POSTed json into a dataverse: " + ex.getMessage()); } try { + JsonArray inputLevels = newDataverseJson.getJsonArray("inputLevels"); + List newInputLevels = inputLevels != null ? 
parseInputLevels(inputLevels, newDataverse) : null; + if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); - d.setOwner(owner); + newDataverse.setOwner(owner); } // set the dataverse - contact relationship in the contacts - for (DataverseContact dc : d.getDataverseContacts()) { - dc.setDataverse(d); + for (DataverseContact dc : newDataverse.getDataverseContacts()) { + dc.setDataverse(newDataverse); } AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - d = execCommand(new CreateDataverseCommand(d, createDataverseRequest(u), null, null)); - return created("/dataverses/" + d.getAlias(), json(d)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), null, newInputLevels)); + return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); } catch (WrappedResponse ww) { String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); @@ -643,7 +644,7 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) { try { Dataverse dataverse = findDataverseOrDie(identifier); - List newInputLevels = parseInputLevels(jsonBody, dataverse); + List newInputLevels = parseInputLevels(Json.createReader(new StringReader(jsonBody)).readArray(), dataverse); execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels)); return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse)); } catch (WrappedResponse e) { @@ -651,9 +652,7 @@ public Response updateInputLevels(@Context ContainerRequestContext crc, @PathPar } } - private List parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse { - JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray(); - + private List 
parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse { List newInputLevels = new ArrayList<>(); for (JsonValue value : inputLevelsArray) { JsonObject inputLevel = (JsonObject) value; From 1cc48b702f46ea5166e6774cd38fc09131c3283a Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 21 Jun 2024 10:49:59 +0100 Subject: [PATCH 02/21] Stash: including facet ids to addDataverse endpoint WIP --- .../harvard/iq/dataverse/api/Dataverses.java | 35 +++++++++++++------ 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index f0c1d04502e..99d74ba7fb9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -140,6 +140,9 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, JsonArray inputLevels = newDataverseJson.getJsonArray("inputLevels"); List newInputLevels = inputLevels != null ? parseInputLevels(inputLevels, newDataverse) : null; + JsonArray facetIds = newDataverseJson.getJsonArray("facetIds"); + List facetList = facetIds != null ? 
parseFacets(facetIds) : null; + if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); newDataverse.setOwner(owner); @@ -151,7 +154,7 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), null, newInputLevels)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, newInputLevels)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); } catch (WrappedResponse ww) { @@ -673,6 +676,20 @@ private List parseInputLevels(JsonArray inputLevel return newInputLevels; } + private List parseFacets(JsonArray facetsArray) throws WrappedResponse { + List facets = new LinkedList<>(); + for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) { + DatasetFieldType dsfType = findDatasetFieldType(facetId.getString()); + if (dsfType == null) { + throw new WrappedResponse(badRequest("Can't find dataset field type '" + facetId + "'")); + } else if (!dsfType.isFacetable()) { + throw new WrappedResponse(badRequest("Dataset field type '" + facetId + "' is not facetable")); + } + facets.add(dsfType); + } + return facets; + } + @DELETE @AuthRequired @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}") @@ -921,16 +938,12 @@ public Response deleteFeaturedCollections(@Context ContainerRequestContext crc, * (judging by the UI). This triggers a 500 when '-d @foo.json' is used. 
*/ public Response setFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String facetIds) { - - List facets = new LinkedList<>(); - for (JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class)) { - DatasetFieldType dsfType = findDatasetFieldType(facetId.getString()); - if (dsfType == null) { - return error(Response.Status.BAD_REQUEST, "Can't find dataset field type '" + facetId + "'"); - } else if (!dsfType.isFacetable()) { - return error(Response.Status.BAD_REQUEST, "Dataset field type '" + facetId + "' is not facetable"); - } - facets.add(dsfType); + JsonArray jsonArray = Util.asJsonArray(facetIds); + List facets; + try { + facets = parseFacets(jsonArray); + } catch (WrappedResponse e) { + return e.getResponse(); } try { From 8c88d14fe88267ee036cd775c61ab68ff0a5e8ed Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 21 Jun 2024 10:58:11 +0100 Subject: [PATCH 03/21] Changed: error messages extracted to Bundle.properties --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 6 +++--- src/main/java/propertyFiles/Bundle.properties | 4 +++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 99d74ba7fb9..0a2fbbc5759 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -663,7 +663,7 @@ private List parseInputLevels(JsonArray inputLevel DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName); if (datasetFieldType == null) { - String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName); + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.invalidfieldtypename"), datasetFieldTypeName); throw new 
WrappedResponse(badRequest(errorMessage)); } @@ -681,9 +681,9 @@ private List parseFacets(JsonArray facetsArray) throws Wrapped for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) { DatasetFieldType dsfType = findDatasetFieldType(facetId.getString()); if (dsfType == null) { - throw new WrappedResponse(badRequest("Can't find dataset field type '" + facetId + "'")); + throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfound"), facetId))); } else if (!dsfType.isFacetable()) { - throw new WrappedResponse(badRequest("Dataset field type '" + facetId + "' is not facetable")); + throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfacetable"), facetId))); } facets.add(dsfType); } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 2996ccb509b..b333980f649 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -947,7 +947,9 @@ dataverse.default=(Default) dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation dataverse.guestbookentry.atdownload=Guestbook Entry At Download dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request -dataverse.updateinputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0} +dataverse.inputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0} +dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}' +dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable # rolesAndPermissionsFragment.xhtml # advanced.xhtml From 7840a0290dd1447e8551cd4e33612c5ef4daf044 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 24 Jun 2024 11:11:24 +0100 Subject: [PATCH 04/21] Added: addDataverse IT --- .../harvard/iq/dataverse/api/Dataverses.java | 31 ++++++++++- 
.../impl/ListDataverseInputLevelsCommand.java | 40 ++++++++++++++ .../command/impl/ListFacetsCommand.java | 22 +++++--- .../iq/dataverse/util/json/JsonPrinter.java | 24 +++++++++ .../iq/dataverse/api/DataversesIT.java | 36 +++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 53 ++++++++++++++++--- 6 files changed, 190 insertions(+), 16 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0a2fbbc5759..658ea16b0e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -29,6 +29,7 @@ import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; import edu.harvard.iq.dataverse.util.StringUtil; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; @@ -60,8 +61,7 @@ import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.Status; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; + import java.io.IOException; import java.io.OutputStream; import java.text.MessageFormat; @@ -641,6 +641,19 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam } } + @GET + @AuthRequired + @Path("{identifier}/inputLevels") + public Response getInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) { + try { + Dataverse dataverse = findDataverseOrDie(identifier); + List inputLevels = execCommand(new ListDataverseInputLevelsCommand(createDataverseRequest(getRequestUser(crc)), dataverse)); + return 
ok(jsonDataverseInputLevels(inputLevels)); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + @PUT @AuthRequired @Path("{identifier}/inputLevels") @@ -957,6 +970,20 @@ public Response setFacets(@Context ContainerRequestContext crc, @PathParam("iden } } + @GET + @AuthRequired + @Path("{identifier}/facets") + @Produces(MediaType.APPLICATION_JSON) + public Response getFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { + try { + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List dataverseFacets = execCommand(new ListFacetsCommand(createDataverseRequest(getRequestUser(crc)), dataverse, false)); + return ok(jsonDataverseFacets(dataverseFacets)); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + @GET @AuthRequired @Path("{identifier}/metadatablockfacets") diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java new file mode 100644 index 00000000000..1727ac9698f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java @@ -0,0 +1,40 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseFacet; +import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * List the field type input levels {@link DataverseFieldTypeInputLevel} of a {@link Dataverse}. 
+ */ +public class ListDataverseInputLevelsCommand extends AbstractCommand> { + + private final Dataverse dataverse; + + public ListDataverseInputLevelsCommand(DataverseRequest request, Dataverse dataverse) { + super(request, dataverse); + this.dataverse = dataverse; + } + + @Override + public List execute(CommandContext ctxt) throws CommandException { + return dataverse.getDataverseFieldTypeInputLevels(); + } + + @Override + public Map> getRequiredPermissions() { + return Collections.singletonMap("", + dataverse.isReleased() ? Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java index cbab378ccac..36bd1ef4981 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + import java.util.Collections; import java.util.List; import java.util.Map; @@ -14,27 +15,34 @@ /** * List the search facets {@link DataverseFacet} of a {@link Dataverse}. 
+ * * @author michaelsuo */ // no annotations here, since permissions are dynamically decided public class ListFacetsCommand extends AbstractCommand> { - private final Dataverse dv; + private final Dataverse dataverse; + private boolean rootFacets; + + public ListFacetsCommand(DataverseRequest request, Dataverse dataverse) { + this(request, dataverse, true); + } - public ListFacetsCommand(DataverseRequest aRequest, Dataverse aDataverse) { - super(aRequest, aDataverse); - dv = aDataverse; + public ListFacetsCommand(DataverseRequest request, Dataverse dataverse, boolean rootFacets) { + super(request, dataverse); + this.dataverse = dataverse; + this.rootFacets = rootFacets; } @Override public List execute(CommandContext ctxt) throws CommandException { - return dv.getDataverseFacets(); + return dataverse.getDataverseFacets(!rootFacets); } @Override public Map> getRequiredPermissions() { return Collections.singletonMap("", - dv.isReleased() ? Collections.emptySet() - : Collections.singleton(Permission.ViewUnpublishedDataverse)); + dataverse.isReleased() ? 
Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 95f14b79ece..16456682f30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1376,4 +1376,28 @@ public static JsonArrayBuilder jsonDataverseFieldTypeInputLevels(List facets) { + JsonArrayBuilder facetsArrayBuilder = Json.createArrayBuilder(); + for (DataverseFacet facet : facets) { + facetsArrayBuilder.add(json(facet)); + } + return facetsArrayBuilder; + } + + public static JsonArrayBuilder jsonDataverseInputLevels(List inputLevels) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevelsArrayBuilder.add(jsonDataverseInputLevel(inputLevel)); + } + return inputLevelsArrayBuilder; + } + + private static JsonObjectBuilder jsonDataverseInputLevel(DataverseFieldTypeInputLevel inputLevel) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("datasetFieldTypeName", inputLevel.getDatasetFieldType().getName()); + jsonObjectBuilder.add("required", inputLevel.isRequired()); + jsonObjectBuilder.add("include", inputLevel.isInclude()); + return jsonObjectBuilder; + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index b072a803aa4..d939aa8afce 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -926,4 +926,40 @@ public void testUpdateInputLevels() { .body("message", equalTo("Error while updating dataverse input levels: Input level list cannot be null or empty")) .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); } + + 
@Test + public void testAddDataverse() { + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String testAliasSuffix = "-add-dataverse"; + + // Without optional input levels and facet ids + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // With optional input levels and facet ids + String[] testInputLevelNames = {"geographicCoverage", "country"}; + String[] testFacetIds = {"authorName", "authorAffiliation"}; + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds); + createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Assert facets are configured + Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(testDataverseAlias, apiToken); + String[] expectedFacetNames = {"Author Name", "Author Affiliation"}; + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0].name"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1].name"); + assertNotEquals(actualFacetName1, actualFacetName2); + assertThat(expectedFacetNames, hasItemInArray(actualFacetName1)); + assertThat(expectedFacetNames, hasItemInArray(actualFacetName2)); + + // Assert input levels are configured + Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(testDataverseAlias, apiToken); + String actualInputLevelName1 = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); + String actualInputLevelName2 = 
listDataverseInputLevelsResponse.then().extract().path("data[1].datasetFieldTypeName"); + assertNotEquals(actualFacetName1, actualFacetName2); + assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName1)); + assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName2)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index b9ae97649a9..68dc93945fd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -358,20 +358,45 @@ public static Response getServiceDocument(String apiToken) { static Response createDataverse(String alias, String category, String apiToken) { return createSubDataverse(alias, category, apiToken, ":root"); } - + static Response createSubDataverse(String alias, String category, String apiToken, String parentDV) { + return createSubDataverse(alias, category, apiToken, parentDV, null, null); + } + + static Response createSubDataverse(String alias, String category, String apiToken, String parentDV, String[] inputLevelNames, String[] facetIds) { JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", getEmailFromUserName(getRandomIdentifier()))); JsonArrayBuilder subjectArrayBuilder = Json.createArrayBuilder(); subjectArrayBuilder.add("Other"); - JsonObject dvData = Json.createObjectBuilder() + JsonObjectBuilder objectBuilder = Json.createObjectBuilder() .add("alias", alias) .add("name", alias) .add("dataverseContacts", contactArrayBuilder) .add("dataverseSubjects", subjectArrayBuilder) // don't send "dataverseType" if category is null, must be a better way - .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? category : "whatever") - .build(); + .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? 
category : "whatever"); + + if (inputLevelNames != null) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for(String inputLevelName : inputLevelNames) { + inputLevelsArrayBuilder.add(Json.createObjectBuilder() + .add("datasetFieldTypeName", inputLevelName) + .add("required", true) + .add("include", true) + ); + } + objectBuilder.add("inputLevels", inputLevelsArrayBuilder); + } + + if (facetIds != null) { + JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); + for(String facetId : facetIds) { + facetIdsArrayBuilder.add(facetId); + } + objectBuilder.add("facetIds", facetIdsArrayBuilder); + } + + JsonObject dvData = objectBuilder.build(); Response createDataverseResponse = given() .body(dvData.toString()).contentType(ContentType.JSON) .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); @@ -3940,9 +3965,9 @@ static Response requestGlobusUploadPaths(Integer datasetId, JsonObject body, Str } static Response updateDataverseInputLevels(String dataverseAlias, String[] inputLevelNames, String apiToken) { - JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); for(String inputLevelName : inputLevelNames) { - contactArrayBuilder.add(Json.createObjectBuilder() + inputLevelsArrayBuilder.add(Json.createObjectBuilder() .add("datasetFieldTypeName", inputLevelName) .add("required", true) .add("include", true) @@ -3950,7 +3975,7 @@ static Response updateDataverseInputLevels(String dataverseAlias, String[] input } return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(contactArrayBuilder.build().toString()) + .body(inputLevelsArrayBuilder.build().toString()) .contentType(ContentType.JSON) .put("/api/dataverses/" + dataverseAlias + "/inputLevels"); } @@ -3962,4 +3987,18 @@ public static Response getOpenAPI(String accept, String format) { .get("/openapi"); return response; } + + static Response listDataverseFacets(String dataverseAlias, 
String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/dataverses/" + dataverseAlias + "/facets"); + } + + static Response listDataverseInputLevels(String dataverseAlias, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/dataverses/" + dataverseAlias + "/inputLevels"); + } } From 1651f8083131dc31e7c1f49e9398125939f8ef1f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 28 Jun 2024 15:11:42 +0100 Subject: [PATCH 05/21] Added: metadata block selection and input level and facets setup fixed for addDaraverse API --- .../harvard/iq/dataverse/api/Dataverses.java | 34 ++++++++-- .../command/impl/CreateDataverseCommand.java | 67 +++++++++++-------- .../iq/dataverse/api/DataversesIT.java | 11 ++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 18 ++++- 4 files changed, 86 insertions(+), 44 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 658ea16b0e2..1ab86666947 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -137,11 +137,19 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } try { - JsonArray inputLevels = newDataverseJson.getJsonArray("inputLevels"); - List newInputLevels = inputLevels != null ? parseInputLevels(inputLevels, newDataverse) : null; + JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks"); + List inputLevels = null; + List selectedMetadataBlocks = null; + if (metadataBlocksJson != null) { + JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); + inputLevels = inputLevelsArray != null ? 
parseInputLevels(inputLevelsArray, newDataverse) : null; + + JsonArray selectedMetadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + selectedMetadataBlocks = selectedMetadataBlocksArray != null ? parseNewDataverseMetadataBlocks(selectedMetadataBlocksArray) : null; + } - JsonArray facetIds = newDataverseJson.getJsonArray("facetIds"); - List facetList = facetIds != null ? parseFacets(facetIds) : null; + JsonArray facetIdsArray = newDataverseJson.getJsonArray("facetIds"); + List facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null; if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); @@ -154,7 +162,7 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, newInputLevels)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, selectedMetadataBlocks)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); } catch (WrappedResponse ww) { @@ -183,7 +191,21 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } } - + + private List parseNewDataverseMetadataBlocks(JsonArray selectedMetadataBlocksArray) throws WrappedResponse { + List selectedMetadataBlocks = new ArrayList<>(); + for (JsonString metadataBlockName : selectedMetadataBlocksArray.getValuesAs(JsonString.class)) { + MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString()); + if (metadataBlock == null) { + // TODO + throw new WrappedResponse(badRequest("TODO")); + } + selectedMetadataBlocks.add(metadataBlock); + } + + return selectedMetadataBlocks; + } + @POST @AuthRequired @Path("{identifier}/validateDatasetJson") diff --git 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 3efefe90681..2cb0b2245b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -1,17 +1,11 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.Group; -import edu.harvard.iq.dataverse.authorization.groups.GroupProvider; -import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -19,15 +13,12 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; -import java.util.logging.Logger; -import org.apache.solr.client.solrj.SolrServerException; /** * TODO make override the date and user more active, so prevent code errors. 
@@ -38,14 +29,23 @@ @RequiredPermissions(Permission.AddDataverse) public class CreateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(CreateDataverseCommand.class.getName()); - private final Dataverse created; private final List inputLevelList; private final List facetList; + private final List metadataBlocks; + + public CreateDataverseCommand(Dataverse created, + DataverseRequest aRequest, + List facetList, + List inputLevelList) { + this(created, aRequest, facetList, inputLevelList, null); + } - public CreateDataverseCommand(Dataverse created, DataverseRequest aRequest, List facetList, - List inputLevelList) { + public CreateDataverseCommand(Dataverse created, + DataverseRequest aRequest, + List facetList, + List inputLevelList, + List metadataBlocks) { super(aRequest, created.getOwner()); this.created = created; if (facetList != null) { @@ -58,6 +58,11 @@ public CreateDataverseCommand(Dataverse created, DataverseRequest aRequest, List } else { this.inputLevelList = null; } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } } @Override @@ -70,6 +75,11 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + created.setMetadataBlockRoot(true); + created.setMetadataBlocks(metadataBlocks); + } + if (created.getCreateDate() == null) { created.setCreateDate(new Timestamp(new Date().getTime())); } @@ -97,8 +107,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { if (ctxt.dataverses().findByAlias(created.getAlias()) != null) { throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this); } - - if(created.getFilePIDsEnabled()!=null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { + + if (created.getFilePIDsEnabled() != null && 
!ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { throw new IllegalCommandException("File PIDs cannot be enabled per collection", this); } @@ -109,7 +119,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN); String privateUrlToken = null; - ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken),false); + ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken), false); // Add additional role assignments if inheritance is set boolean inheritAllRoles = false; String rolesString = ctxt.settings().getValueForKey(SettingsServiceBean.Key.InheritParentRoleAssignments, ""); @@ -129,18 +139,18 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { // above... if ((inheritAllRoles || rolesToInherit.contains(role.getRole().getAlias())) && !(role.getAssigneeIdentifier().equals(getRequest().getUser().getIdentifier()) - && role.getRole().equals(adminRole))) { + && role.getRole().equals(adminRole))) { String identifier = role.getAssigneeIdentifier(); if (identifier.startsWith(AuthenticatedUser.IDENTIFIER_PREFIX)) { identifier = identifier.substring(AuthenticatedUser.IDENTIFIER_PREFIX.length()); ctxt.roles().save(new RoleAssignment(role.getRole(), - ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken),false); + ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken), false); } else if (identifier.startsWith(Group.IDENTIFIER_PREFIX)) { identifier = identifier.substring(Group.IDENTIFIER_PREFIX.length()); Group roleGroup = ctxt.groups().getGroup(identifier); if (roleGroup != null) { ctxt.roles().save(new RoleAssignment(role.getRole(), - roleGroup, managedDv, privateUrlToken),false); + roleGroup, managedDv, privateUrlToken), false); } } } @@ -150,11 +160,9 @@ public 
Dataverse execute(CommandContext ctxt) throws CommandException { } managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); - // TODO: save is called here and above; we likely don't need both - managedDv = ctxt.dataverses().save(managedDv); - // ctxt.index().indexDataverse(managedDv); if (facetList != null) { + managedDv.setFacetRoot(true); ctxt.facets().deleteFacetsFor(managedDv); int i = 0; for (DatasetFieldType df : facetList) { @@ -164,16 +172,19 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { if (inputLevelList != null) { ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv); - for (DataverseFieldTypeInputLevel obj : inputLevelList) { - obj.setDataverse(managedDv); - ctxt.fieldTypeInputLevels().create(obj); + for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { + inputLevel.setDataverse(managedDv); + ctxt.fieldTypeInputLevels().create(inputLevel); } } + + // TODO: save is called here and above; we likely don't need both + managedDv = ctxt.dataverses().save(managedDv); return managedDv; } - + @Override - public boolean onSuccess(CommandContext ctxt, Object r) { + public boolean onSuccess(CommandContext ctxt, Object r) { return ctxt.dataverses().index((Dataverse) r); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index d939aa8afce..3943ffcd186 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -933,16 +933,12 @@ public void testAddDataverse() { String apiToken = UtilIT.getApiTokenFromResponse(createUser); String testAliasSuffix = "-add-dataverse"; - // Without optional input levels and facet ids - String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - Response createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); - 
createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - // With optional input levels and facet ids String[] testInputLevelNames = {"geographicCoverage", "country"}; String[] testFacetIds = {"authorName", "authorAffiliation"}; - testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds); + String[] testMetadataBlockNames = {"citation", "geospatial"}; + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds, testMetadataBlockNames); createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); // Assert facets are configured @@ -956,6 +952,7 @@ public void testAddDataverse() { // Assert input levels are configured Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(testDataverseAlias, apiToken); + listDataverseInputLevelsResponse.prettyPrint(); String actualInputLevelName1 = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); String actualInputLevelName2 = listDataverseInputLevelsResponse.then().extract().path("data[1].datasetFieldTypeName"); assertNotEquals(actualFacetName1, actualFacetName2); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 68dc93945fd..13b96e50cad 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -360,10 +360,10 @@ static Response createDataverse(String alias, String category, String apiToken) } static Response createSubDataverse(String alias, String category, String 
apiToken, String parentDV) { - return createSubDataverse(alias, category, apiToken, parentDV, null, null); + return createSubDataverse(alias, category, apiToken, parentDV, null, null, null); } - static Response createSubDataverse(String alias, String category, String apiToken, String parentDV, String[] inputLevelNames, String[] facetIds) { + static Response createSubDataverse(String alias, String category, String apiToken, String parentDV, String[] inputLevelNames, String[] facetIds, String[] metadataBlockNames) { JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", getEmailFromUserName(getRandomIdentifier()))); JsonArrayBuilder subjectArrayBuilder = Json.createArrayBuilder(); @@ -376,6 +376,8 @@ static Response createSubDataverse(String alias, String category, String apiToke // don't send "dataverseType" if category is null, must be a better way .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? 
category : "whatever"); + JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); + if (inputLevelNames != null) { JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); for(String inputLevelName : inputLevelNames) { @@ -385,9 +387,19 @@ static Response createSubDataverse(String alias, String category, String apiToke .add("include", true) ); } - objectBuilder.add("inputLevels", inputLevelsArrayBuilder); + metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); } + if (metadataBlockNames != null) { + JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); + for(String metadataBlockName : metadataBlockNames) { + metadataBlockNamesArrayBuilder.add(metadataBlockName); + } + metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); + } + + objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + if (facetIds != null) { JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); for(String facetId : facetIds) { From ca3d1031d6eff53ed78c20a65c57f31e979b4e78 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 28 Jun 2024 16:15:05 +0100 Subject: [PATCH 06/21] Changed: setting dataverse facet root only if facet list is not empty in CreateDataverseCommand --- .../engine/command/impl/CreateDataverseCommand.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 2cb0b2245b1..94f322c447c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -162,8 +162,12 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); if (facetList != 
null) { - managedDv.setFacetRoot(true); ctxt.facets().deleteFacetsFor(managedDv); + + if (!facetList.isEmpty()) { + managedDv.setFacetRoot(true); + } + int i = 0; for (DatasetFieldType df : facetList) { ctxt.facets().create(i++, df, managedDv); From 3db5b50a84def7d26c80acff8353c66d0cb32067 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 28 Jun 2024 16:17:25 +0100 Subject: [PATCH 07/21] Removed: prettyPrint call --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3943ffcd186..91afe2aa86c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -952,7 +952,6 @@ public void testAddDataverse() { // Assert input levels are configured Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(testDataverseAlias, apiToken); - listDataverseInputLevelsResponse.prettyPrint(); String actualInputLevelName1 = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); String actualInputLevelName2 = listDataverseInputLevelsResponse.then().extract().path("data[1].datasetFieldTypeName"); assertNotEquals(actualFacetName1, actualFacetName2); From 97cca69a5f48ed25f47b7723d6bab8cc43fa5fd8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 28 Jun 2024 16:54:34 +0100 Subject: [PATCH 08/21] Added: addInputLevelsMetadataBlocksIfNotPresent method to CreateDataverseCommand --- .../edu/harvard/iq/dataverse/Dataverse.java | 14 +++++++++++++- .../command/impl/CreateDataverseCommand.java | 3 +++ .../UpdateDataverseInputLevelsCommand.java | 18 +----------------- 3 files changed, 17 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 78b1827c798..7b74b7c19f2 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -30,7 +30,6 @@ import jakarta.persistence.OneToOne; import jakarta.persistence.OrderBy; import jakarta.persistence.Table; -import jakarta.persistence.Transient; import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.Pattern; import jakarta.validation.constraints.Size; @@ -828,4 +827,17 @@ public boolean isAncestorOf( DvObject other ) { public String getLocalURL() { return SystemConfig.getDataverseSiteUrlStatic() + "/dataverse/" + this.getAlias(); } + + public void addInputLevelsMetadataBlocksIfNotPresent(List inputLevels) { + for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock(); + if (!hasMetadataBlock(inputLevelMetadataBlock)) { + metadataBlocks.add(inputLevelMetadataBlock); + } + } + } + + private boolean hasMetadataBlock(MetadataBlock metadataBlock) { + return metadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId())); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 94f322c447c..489b36e7cef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -175,6 +175,9 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } if (inputLevelList != null) { + if (!inputLevelList.isEmpty()) { + managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); + } ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv); for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { inputLevel.setDataverse(managedDv); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java index cf7b4a6f69c..b9b08992919 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; -import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -29,23 +28,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { if (inputLevelList == null || inputLevelList.isEmpty()) { throw new CommandException("Error while updating dataverse input levels: Input level list cannot be null or empty", this); } - addInputLevelMetadataBlocks(); + dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); dataverse.setMetadataBlockRoot(true); return ctxt.engine().submit(new UpdateDataverseCommand(dataverse, null, null, getRequest(), inputLevelList)); } - - private void addInputLevelMetadataBlocks() { - List dataverseMetadataBlocks = dataverse.getMetadataBlocks(); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock(); - if (!dataverseHasMetadataBlock(dataverseMetadataBlocks, inputLevelMetadataBlock)) { - dataverseMetadataBlocks.add(inputLevelMetadataBlock); - } - } - dataverse.setMetadataBlocks(dataverseMetadataBlocks); - } - - private boolean dataverseHasMetadataBlock(List dataverseMetadataBlocks, MetadataBlock metadataBlock) { - return dataverseMetadataBlocks.stream().anyMatch(block -> 
block.getId().equals(metadataBlock.getId())); - } } From 4bc644c2f60526befb3eb15f8711a5016e30c852 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 1 Jul 2024 10:50:18 +0100 Subject: [PATCH 09/21] Added: invalid metadata block name error message to bundle strings --- .../edu/harvard/iq/dataverse/api/Dataverses.java | 12 ++++++------ src/main/java/propertyFiles/Bundle.properties | 1 + 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 1ab86666947..5ae26d31330 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -144,8 +144,8 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null; - JsonArray selectedMetadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - selectedMetadataBlocks = selectedMetadataBlocksArray != null ? parseNewDataverseMetadataBlocks(selectedMetadataBlocksArray) : null; + JsonArray newMetadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + selectedMetadataBlocks = newMetadataBlockNamesArray != null ? 
parseNewDataverseMetadataBlocks(newMetadataBlockNamesArray) : null; } JsonArray facetIdsArray = newDataverseJson.getJsonArray("facetIds"); @@ -192,13 +192,13 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } } - private List parseNewDataverseMetadataBlocks(JsonArray selectedMetadataBlocksArray) throws WrappedResponse { + private List parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { List selectedMetadataBlocks = new ArrayList<>(); - for (JsonString metadataBlockName : selectedMetadataBlocksArray.getValuesAs(JsonString.class)) { + for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) { MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString()); if (metadataBlock == null) { - // TODO - throw new WrappedResponse(badRequest("TODO")); + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.invalidmetadatablockname"), metadataBlockName); + throw new WrappedResponse(badRequest(errorMessage)); } selectedMetadataBlocks.add(metadataBlock); } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 7ce060ed168..0f889ace233 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -950,6 +950,7 @@ dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request dataverse.inputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0} dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}' dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable +dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0} # rolesAndPermissionsFragment.xhtml # advanced.xhtml From d47396b74f0cbcbf32a7a854a726ec3a80958991 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 1 Jul 2024 15:32:45 +0100 
Subject: [PATCH 10/21] Added: error test cases to addDataverse IT --- .../harvard/iq/dataverse/api/Dataverses.java | 8 ++--- .../iq/dataverse/api/DataversesIT.java | 36 +++++++++++++++++-- 2 files changed, 38 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 5ae26d31330..b0f8e90252c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -139,13 +139,13 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, try { JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks"); List inputLevels = null; - List selectedMetadataBlocks = null; + List metadataBlocks = null; if (metadataBlocksJson != null) { JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null; - JsonArray newMetadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - selectedMetadataBlocks = newMetadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(newMetadataBlockNamesArray) : null; + JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + metadataBlocks = metadataBlockNamesArray != null ? 
parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null; } JsonArray facetIdsArray = newDataverseJson.getJsonArray("facetIds"); @@ -162,7 +162,7 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, selectedMetadataBlocks)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); } catch (WrappedResponse ww) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 91afe2aa86c..6c78db4d758 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -933,12 +933,17 @@ public void testAddDataverse() { String apiToken = UtilIT.getApiTokenFromResponse(createUser); String testAliasSuffix = "-add-dataverse"; + // Without optional input levels and facet ids + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + // With optional input levels and facet ids String[] testInputLevelNames = {"geographicCoverage", "country"}; String[] testFacetIds = {"authorName", "authorAffiliation"}; String[] testMetadataBlockNames = {"citation", "geospatial"}; - String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - Response createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", 
testInputLevelNames, testFacetIds, testMetadataBlockNames); + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds, testMetadataBlockNames); createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); // Assert facets are configured @@ -957,5 +962,32 @@ public void testAddDataverse() { assertNotEquals(actualFacetName1, actualFacetName2); assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName1)); assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName2)); + + // Should return error when an invalid facet id is sent + String invalidFacetId = "invalidFacetId"; + String[] testInvalidFacetIds = {"authorName", invalidFacetId}; + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testMetadataBlockNames); + createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Cant find dataset field type \"" + invalidFacetId + "\"")); + + // Should return error when an invalid input level is sent + String invalidInputLevelName = "wrongInputLevel"; + String[] testInvalidInputLevelNames = {"geographicCoverage", invalidInputLevelName}; + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInvalidInputLevelNames, testFacetIds, testMetadataBlockNames); + createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid dataset field type name: " + 
invalidInputLevelName)); + + // Should return error when an invalid metadata block name is sent + String invalidMetadataBlockName = "invalidMetadataBlockName"; + String[] testInvalidMetadataBlockNames = {"citation", invalidMetadataBlockName}; + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testInvalidMetadataBlockNames); + createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } } From d49aa70fb3fc6ca7e8446dfb35a9731a1c710da2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 10:13:36 +0100 Subject: [PATCH 11/21] Added: docs for addDataverse API endpoint --- .../dataverse-complete-optional-params.json | 65 +++++++++++++++++++ doc/sphinx-guides/source/api/native-api.rst | 8 +++ 2 files changed, 73 insertions(+) create mode 100644 doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json diff --git a/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json new file mode 100644 index 00000000000..fef32aa1e2c --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json @@ -0,0 +1,65 @@ +{ + "name": "Scientific Research", + "alias": "science", + "dataverseContacts": [ + { + "contactEmail": "pi@example.edu" + }, + { + "contactEmail": "student@example.edu" + } + ], + "affiliation": "Scientific Research University", + "description": "We do all the science.", + "dataverseType": "LABORATORY", + "metadataBlocks": { + "metadataBlockNames": [ + "citation", "geospatial" + ], + "inputLevels": [ + { + "datasetFieldTypeName": "geographicCoverage", + "include": true, + "required": 
true + }, + { + "datasetFieldTypeName": "country", + "include": true, + "required": true + }, + { + "datasetFieldTypeName": "geographicUnit", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "geographicBoundingBox", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "westLongitude", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "eastLongitude", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "northLatitude", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "southLatitude", + "include": false, + "required": false + } + ], + "facetIds": [ + "authorName", "authorAffiliation" + ] + } +} diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 894f84d2aac..947537447ef 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -66,6 +66,14 @@ The fully expanded example above (without environment variables) looks like this You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created Dataverse collection. +The request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects: + +- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection. +- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, they will be added automatically to the Dataverse collection. +- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. 
+ +To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. + .. _view-dataverse: View a Dataverse Collection From fd88c928d820e6d9eccb55ebb4593a34c55c4014 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 12:44:19 +0100 Subject: [PATCH 12/21] Fixed: failing unit tests --- src/main/java/edu/harvard/iq/dataverse/Dataverse.java | 2 +- src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 7b74b7c19f2..fd51e2789f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -586,7 +586,7 @@ public void setDataverseTheme(DataverseTheme dataverseTheme) { } public void setMetadataBlocks(List metadataBlocks) { - this.metadataBlocks = metadataBlocks; + this.metadataBlocks = new ArrayList<>(metadataBlocks); } public List getCitationDatasetFieldTypes() { diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java index 927d288d660..1b66ca56b23 100644 --- a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java +++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java @@ -196,6 +196,7 @@ public static DatasetFieldType makeDatasetFieldType() { DatasetFieldType retVal = new DatasetFieldType("SampleType-"+id, FieldType.TEXT, false); retVal.setId(id); MetadataBlock mdb = new MetadataBlock(); + mdb.setId(new Random().nextLong()); mdb.setName("Test"); retVal.setMetadataBlock(mdb); return retVal; From 9194865aae2be01715c1eae0537227d749659642 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 12:55:04 +0100 Subject: [PATCH 13/21] Added: 
docs for inputLevels GET API endpoint --- doc/sphinx-guides/source/api/native-api.rst | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 947537447ef..5c0f57cab45 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -266,6 +266,25 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets" +List Field Type Input Levels Configured for a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +|CORS| List the dataverse field type input levels configured for a given Dataverse collection ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/inputLevels" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/inputLevels" + Set Metadata Block Facets for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 2123388721e96461cbb7d7ceba9e30e0cecf1da5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 13:40:21 +0100 Subject: [PATCH 14/21] Removed: duplicated endpoint --- .../edu/harvard/iq/dataverse/api/Dataverses.java | 14 -------------- .../edu/harvard/iq/dataverse/api/DataversesIT.java | 9 ++++----- 2 files changed, 4 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index b0f8e90252c..e6b34f10002 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -992,20 +992,6 @@ public Response setFacets(@Context ContainerRequestContext crc, @PathParam("iden } } - @GET - @AuthRequired - @Path("{identifier}/facets") - @Produces(MediaType.APPLICATION_JSON) - public Response getFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { - try { - Dataverse dataverse = findDataverseOrDie(dvIdtf); - List dataverseFacets = execCommand(new ListFacetsCommand(createDataverseRequest(getRequestUser(crc)), dataverse, false)); - return ok(jsonDataverseFacets(dataverseFacets)); - } catch (WrappedResponse e) { - return e.getResponse(); - } - } - @GET @AuthRequired @Path("{identifier}/metadatablockfacets") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 6c78db4d758..bbf44fa3bc9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -948,12 +948,11 @@ public void testAddDataverse() { // Assert facets are configured Response 
listDataverseFacetsResponse = UtilIT.listDataverseFacets(testDataverseAlias, apiToken); - String[] expectedFacetNames = {"Author Name", "Author Affiliation"}; - String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0].name"); - String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1].name"); + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]"); assertNotEquals(actualFacetName1, actualFacetName2); - assertThat(expectedFacetNames, hasItemInArray(actualFacetName1)); - assertThat(expectedFacetNames, hasItemInArray(actualFacetName2)); + assertThat(testFacetIds, hasItemInArray(actualFacetName1)); + assertThat(testFacetIds, hasItemInArray(actualFacetName2)); // Assert input levels are configured Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(testDataverseAlias, apiToken); From b261668efc72cd270e868c89bd68fed33dfda016 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 13:43:47 +0100 Subject: [PATCH 15/21] Removed: unused json printer method --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 16456682f30..bd181d880e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1377,14 +1377,6 @@ public static JsonArrayBuilder jsonDataverseFieldTypeInputLevels(List facets) { - JsonArrayBuilder facetsArrayBuilder = Json.createArrayBuilder(); - for (DataverseFacet facet : facets) { - facetsArrayBuilder.add(json(facet)); - } - return facetsArrayBuilder; - } - public static JsonArrayBuilder jsonDataverseInputLevels(List inputLevels) { JsonArrayBuilder 
inputLevelsArrayBuilder = Json.createArrayBuilder(); for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { From c9e1f07205273e3d4fae3a2eee24c9ddc95b8e8a Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 15:08:03 +0100 Subject: [PATCH 16/21] Added: release notes for #10633 --- doc/release-notes/10633-add-dataverse-api-extension.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10633-add-dataverse-api-extension.md diff --git a/doc/release-notes/10633-add-dataverse-api-extension.md b/doc/release-notes/10633-add-dataverse-api-extension.md new file mode 100644 index 00000000000..6b0599074da --- /dev/null +++ b/doc/release-notes/10633-add-dataverse-api-extension.md @@ -0,0 +1 @@ +The addDataverse (/api/dataverses/{identifier}) API endpoint has been extended to allow adding inputLevels and facetIds at creation time, as DataversePage does in JSF. From f4168f46c909c6b00fd8c0ed5ad8cc0de9807358 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 15:09:49 +0100 Subject: [PATCH 17/21] Fixed: release notes for #10633 --- doc/release-notes/10633-add-dataverse-api-extension.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10633-add-dataverse-api-extension.md b/doc/release-notes/10633-add-dataverse-api-extension.md index 6b0599074da..f5d8030e8ac 100644 --- a/doc/release-notes/10633-add-dataverse-api-extension.md +++ b/doc/release-notes/10633-add-dataverse-api-extension.md @@ -1 +1 @@ -The addDataverse (/api/dataverses/{identifier}) API endpoint has been extended to allow adding inputLevels and facetIds at creation time, as DataversePage does in JSF. +The addDataverse (/api/dataverses/{identifier}) API endpoint has been extended to allow adding metadata blocks, input levels and facet ids at creation time, as the Dataverse page in create mode does in JSF. 
From e65eb9f947beec06ddbfc6f0bfb73f7ecaf60d28 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 2 Jul 2024 15:30:18 +0100 Subject: [PATCH 18/21] Added: doc tweak --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5c0f57cab45..ccaccd81bf8 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -70,7 +70,7 @@ The request JSON supports an optional ``metadataBlocks`` object, with the follow - ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection. - ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata blocks names are not specified in the ``metadataBlockNames``` field, they will be added automatically to the Dataverse collection. -- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. +- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array. To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. 
From f5695300bc81743e499997916d2463a456444936 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 18 Jul 2024 13:02:31 +0100 Subject: [PATCH 19/21] Added: addDataverse IT cases --- .../iq/dataverse/api/DataversesIT.java | 47 +++++++++++++++---- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index a85e8aa0667..2b6881b4620 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -957,16 +957,20 @@ public void testAddDataverse() { // Without optional input levels and facet ids String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - Response createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); - createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + String actualMetadataBlockName = listMetadataBlocksResponse.then().extract().path("data[0].name"); + assertEquals(actualMetadataBlockName, "citation"); // With optional input levels and facet ids String[] testInputLevelNames = {"geographicCoverage", "country"}; String[] testFacetIds = {"authorName", "authorAffiliation"}; String[] testMetadataBlockNames = {"citation", "geospatial"}; testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", 
testInputLevelNames, testFacetIds, testMetadataBlockNames); - createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds, testMetadataBlockNames); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); // Assert facets are configured Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(testDataverseAlias, apiToken); @@ -984,12 +988,35 @@ public void testAddDataverse() { assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName1)); assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName2)); + // Assert metadata blocks are configured + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + String actualMetadataBlockName1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + String actualMetadataBlockName2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + assertNotEquals(actualMetadataBlockName1, actualMetadataBlockName2); + assertThat(testMetadataBlockNames, hasItemInArray(actualMetadataBlockName1)); + assertThat(testMetadataBlockNames, hasItemInArray(actualMetadataBlockName2)); + + // Setting metadata blocks without citation + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + String[] testMetadataBlockNamesWithoutCitation = {"geospatial"}; + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", null, null, testMetadataBlockNamesWithoutCitation); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Assert metadata blocks including citation are configured + String[] testExpectedBlockNames = {"citation", "geospatial"}; + actualMetadataBlockName1 = 
UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken).then().extract().path("data[0].name"); + actualMetadataBlockName2 = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken).then().extract().path("data[1].name"); + assertNotEquals(actualMetadataBlockName1, actualMetadataBlockName2); + assertThat(testExpectedBlockNames, hasItemInArray(actualMetadataBlockName1)); + assertThat(testExpectedBlockNames, hasItemInArray(actualMetadataBlockName2)); + // Should return error when an invalid facet id is sent String invalidFacetId = "invalidFacetId"; String[] testInvalidFacetIds = {"authorName", invalidFacetId}; testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testMetadataBlockNames); - createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat() + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testMetadataBlockNames); + createSubDataverseResponse.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Cant find dataset field type \"" + invalidFacetId + "\"")); @@ -997,8 +1024,8 @@ public void testAddDataverse() { String invalidInputLevelName = "wrongInputLevel"; String[] testInvalidInputLevelNames = {"geographicCoverage", invalidInputLevelName}; testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInvalidInputLevelNames, testFacetIds, testMetadataBlockNames); - createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat()
.statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid dataset field type name: " + invalidInputLevelName)); @@ -1006,8 +1033,8 @@ public void testAddDataverse() { String invalidMetadataBlockName = "invalidMetadataBlockName"; String[] testInvalidMetadataBlockNames = {"citation", invalidMetadataBlockName}; testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; - createSubDataverseWithInputLevelsAndFacetIdsResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testInvalidMetadataBlockNames); - createSubDataverseWithInputLevelsAndFacetIdsResponse.then().assertThat() + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testInvalidMetadataBlockNames); + createSubDataverseResponse.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } From 4be73095cc8940d75f41aa4223c1b312d30e2743 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 18 Jul 2024 14:01:35 +0100 Subject: [PATCH 20/21] Changed: response strings extracted to bundle properties --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 4 ++-- src/main/java/propertyFiles/Bundle.properties | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index e6b34f10002..f5fe60cb126 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -130,10 +130,10 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, newDataverse = jsonParser().parseDataverse(newDataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); - return error(Status.BAD_REQUEST, "Error 
parsing Json: " + jpe.getMessage()); + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); - return error(Response.Status.BAD_REQUEST, "Error parsing the POSTed json into a dataverse: " + ex.getMessage()); + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); } try { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index bdec492f5db..c2e7625093b 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -951,6 +951,8 @@ dataverse.inputlevels.error.invalidfieldtypename=Invalid dataset field type name dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}' dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0} +dataverse.create.error.jsonparse=Error parsing Json: {0} +dataverse.create.error.jsonparsetodataverse=Error parsing the POSTed json into a dataverse: {0} # rolesAndPermissionsFragment.xhtml # advanced.xhtml From 27adc35f6898fe43708e942bea7034c2510b5ee5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 18 Jul 2024 15:08:16 +0100 Subject: [PATCH 21/21] Added: parse input level validation to avoid fields marked as required but not as included --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 5 +++++ src/main/java/propertyFiles/Bundle.properties | 1 + .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 7 +++++++ 3 files changed, 13 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 
f5fe60cb126..81db5f7d782 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -705,6 +705,11 @@ private List parseInputLevels(JsonArray inputLevel boolean required = inputLevel.getBoolean("required"); boolean include = inputLevel.getBoolean("include"); + if (required && !include) { + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded"), datasetFieldTypeName); + throw new WrappedResponse(badRequest(errorMessage)); + } + newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include)); } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index c2e7625093b..aa7c00c66af 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -948,6 +948,7 @@ dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation dataverse.guestbookentry.atdownload=Guestbook Entry At Download dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request dataverse.inputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0} +dataverse.inputlevels.error.cannotberequiredifnotincluded=The input level for the dataset field type {0} cannot be required if it is not included dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}' dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 2b6881b4620..09b60e46e7e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -941,6 +941,13 @@ public 
void testUpdateInputLevels() { .body("message", equalTo("Invalid dataset field type name: invalid1")) .statusCode(BAD_REQUEST.getStatusCode()); + // Update input levels with invalid configuration (field required but not included) + testIncludedInputLevels = new boolean[]{false, false}; + updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); + updateDataverseInputLevelsResponse.then().assertThat() + .body("message", equalTo(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded", List.of("geographicCoverage")))) + .statusCode(BAD_REQUEST.getStatusCode()); + // Update invalid empty input levels testInputLevelNames = new String[]{}; updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken);