diff --git a/doc/release-notes/10633-add-dataverse-api-extension.md b/doc/release-notes/10633-add-dataverse-api-extension.md
new file mode 100644
index 00000000000..f5d8030e8ac
--- /dev/null
+++ b/doc/release-notes/10633-add-dataverse-api-extension.md
@@ -0,0 +1 @@
+The addDataverse (/api/dataverses/{identifier}) API endpoint has been extended to allow adding metadata blocks, input levels, and facet IDs at creation time, mirroring what the Dataverse page offers in create mode in the JSF UI.
diff --git a/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
new file mode 100644
index 00000000000..fef32aa1e2c
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
@@ -0,0 +1,65 @@
+{
+  "name": "Scientific Research",
+  "alias": "science",
+  "dataverseContacts": [
+    {
+      "contactEmail": "pi@example.edu"
+    },
+    {
+      "contactEmail": "student@example.edu"
+    }
+  ],
+  "affiliation": "Scientific Research University",
+  "description": "We do all the science.",
+  "dataverseType": "LABORATORY",
+  "metadataBlocks": {
+    "metadataBlockNames": [
+      "citation", "geospatial"
+    ],
+    "inputLevels": [
+      {
+        "datasetFieldTypeName": "geographicCoverage",
+        "include": true,
+        "required": true
+      },
+      {
+        "datasetFieldTypeName": "country",
+        "include": true,
+        "required": true
+      },
+      {
+        "datasetFieldTypeName": "geographicUnit",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "geographicBoundingBox",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "westLongitude",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "eastLongitude",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "northLatitude",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "southLatitude",
+        "include": false,
+        "required": false
+      }
+    ]
+  },
+  "facetIds": [
+    "authorName", "authorAffiliation"
+  ]
+}
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 75ee5a51f90..e7ed71f06ef 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -66,6 +66,25 @@ The fully expanded example above (without environment variables) looks like this
 
 You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created Dataverse collection.
 
+The request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects:
+
+- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection.
+- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, those blocks will be added automatically to the Dataverse collection.
+
+The request JSON also supports an optional ``facetIds`` array at the top level of the JSON object, listing the names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array.
+
+To obtain an example of how these objects are included in the JSON file, download the :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs.
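+
+For instance, assuming the downloaded JSON file is saved in your current directory, a creation request might look like the following sketch (the parent alias ``root`` and the server URL are illustrative):
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PARENT=root
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT" --upload-file dataverse-complete-optional-params.json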
+
 
 .. _view-dataverse:
 
 View a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -258,6 +277,25 @@ The fully expanded example above (without environment variables) looks like this
 
   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets"
 
+List Field Type Input Levels Configured for a Dataverse Collection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|CORS| List the dataverse field type input levels configured for a given Dataverse collection ``id``:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/inputLevels"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/inputLevels"
+
 Set Metadata Block Facets for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index 978c716e058..86e2e0207c1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -30,7 +30,6 @@
 import jakarta.persistence.OneToOne;
 import jakarta.persistence.OrderBy;
 import jakarta.persistence.Table;
-import jakarta.persistence.Transient;
 import jakarta.validation.constraints.NotNull;
 import jakarta.validation.constraints.Pattern;
 import jakarta.validation.constraints.Size;
@@ -593,7 +592,7 @@ public void setDataverseTheme(DataverseTheme dataverseTheme) {
     }
 
     public void setMetadataBlocks(List<MetadataBlock> metadataBlocks) {
-        this.metadataBlocks = metadataBlocks;
+        this.metadataBlocks = new ArrayList<>(metadataBlocks);
     }
 
     public List<DatasetFieldType> getCitationDatasetFieldTypes() {
@@ -834,4 +833,17 @@ public boolean isAncestorOf( DvObject other ) {
     public String getLocalURL() {
         return SystemConfig.getDataverseSiteUrlStatic() + "/dataverse/" + this.getAlias();
     }
+
+    public void addInputLevelsMetadataBlocksIfNotPresent(List<DataverseFieldTypeInputLevel> inputLevels) {
+        for (DataverseFieldTypeInputLevel inputLevel : inputLevels) {
+            MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock();
+            if (!hasMetadataBlock(inputLevelMetadataBlock)) {
+                metadataBlocks.add(inputLevelMetadataBlock);
+            }
+        }
+    }
+
+    private boolean hasMetadataBlock(MetadataBlock metadataBlock) {
+        return metadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId()));
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 02b60fdb32a..81db5f7d782 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -29,6 +29,7 @@
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty;
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
@@ -60,8 +61,7 @@
 import jakarta.ws.rs.core.MediaType;
 import jakarta.ws.rs.core.Response;
 import jakarta.ws.rs.core.Response.Status;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.text.MessageFormat;
@@ -123,35 +123,47 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) {
     @AuthRequired
     @Path("{identifier}")
     public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) {
-
-        Dataverse d;
-        JsonObject dvJson;
+        Dataverse newDataverse;
+        JsonObject newDataverseJson;
         try {
-            dvJson = JsonUtil.getJsonObject(body);
-            d = jsonParser().parseDataverse(dvJson);
+            newDataverseJson = JsonUtil.getJsonObject(body);
+            newDataverse = jsonParser().parseDataverse(newDataverseJson);
         } catch (JsonParsingException jpe) {
             logger.log(Level.SEVERE, "Json: {0}", body);
-            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
+            return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage()));
        } catch (JsonParseException ex) {
             logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex);
-            return error(Response.Status.BAD_REQUEST,
-                    "Error parsing the POSTed json into a dataverse: " + ex.getMessage());
+            return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage()));
         }
 
         try {
+            JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks");
+            List<DataverseFieldTypeInputLevel> inputLevels = null;
+            List<MetadataBlock> metadataBlocks = null;
+            if (metadataBlocksJson != null) {
+                JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels");
+                inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null;
+
+                JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames");
+                metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null;
+            }
+
+            JsonArray facetIdsArray = newDataverseJson.getJsonArray("facetIds");
+            List<DatasetFieldType> facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null;
+
             if (!parentIdtf.isEmpty()) {
                 Dataverse owner = findDataverseOrDie(parentIdtf);
-                d.setOwner(owner);
+                newDataverse.setOwner(owner);
             }
 
             // set the dataverse - contact relationship in the contacts
-            for (DataverseContact dc : d.getDataverseContacts()) {
-                dc.setDataverse(d);
+            for (DataverseContact dc : newDataverse.getDataverseContacts()) {
+                dc.setDataverse(newDataverse);
             }
 
             AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
-            d = execCommand(new CreateDataverseCommand(d, createDataverseRequest(u), null, null));
-            return created("/dataverses/" + d.getAlias(), json(d));
+            newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks));
+            return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse));
         } catch (WrappedResponse ww) {
 
             String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause());
@@ -179,7 +191,21 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body,
         }
     }
-    
+
+    private List<MetadataBlock> parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse {
+        List<MetadataBlock> selectedMetadataBlocks = new ArrayList<>();
+        for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) {
+            MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString());
+            if (metadataBlock == null) {
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.invalidmetadatablockname"), metadataBlockName.getString());
+                throw new WrappedResponse(badRequest(errorMessage));
+            }
+            selectedMetadataBlocks.add(metadataBlock);
+        }
+
+        return selectedMetadataBlocks;
+    }
+
     @POST
     @AuthRequired
     @Path("{identifier}/validateDatasetJson")
@@ -637,13 +663,26 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam
         }
     }
 
+    @GET
+    @AuthRequired
+    @Path("{identifier}/inputLevels")
+    public Response getInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) {
+        try {
+            Dataverse dataverse = findDataverseOrDie(identifier);
+            List<DataverseFieldTypeInputLevel> inputLevels = execCommand(new ListDataverseInputLevelsCommand(createDataverseRequest(getRequestUser(crc)), dataverse));
+            return ok(jsonDataverseInputLevels(inputLevels));
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
+    }
+
     @PUT
     @AuthRequired
     @Path("{identifier}/inputLevels")
     public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) {
         try {
             Dataverse dataverse = findDataverseOrDie(identifier);
-            List<DataverseFieldTypeInputLevel> newInputLevels = parseInputLevels(jsonBody, dataverse);
+            List<DataverseFieldTypeInputLevel> newInputLevels = parseInputLevels(Json.createReader(new StringReader(jsonBody)).readArray(), dataverse);
             execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels));
             return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse));
         } catch (WrappedResponse e) {
@@ -651,9 +690,7 @@ public Response updateInputLevels(@Context ContainerRequestContext crc, @PathPar
         }
     }
 
-    private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse {
-        JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray();
-
+    private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse {
         List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>();
         for (JsonValue value : inputLevelsArray) {
             JsonObject inputLevel = (JsonObject) value;
@@ -661,19 +698,38 @@ private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse {
             String datasetFieldTypeName = inputLevel.getString("datasetFieldTypeName");
             DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName);
 
             if (datasetFieldType == null) {
-                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
                 throw new WrappedResponse(badRequest(errorMessage));
             }
 
             boolean required = inputLevel.getBoolean("required");
             boolean include = inputLevel.getBoolean("include");
 
+            if (required && !include) {
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded"), datasetFieldTypeName);
+                throw new WrappedResponse(badRequest(errorMessage));
+            }
+
             newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include));
         }
 
         return newInputLevels;
     }
 
+    private List<DatasetFieldType> parseFacets(JsonArray facetsArray) throws WrappedResponse {
+        List<DatasetFieldType> facets = new LinkedList<>();
+        for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) {
+            DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
+            if (dsfType == null) {
+                throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfound"), facetId.getString())));
+            } else if (!dsfType.isFacetable()) {
+                throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfacetable"), facetId.getString())));
+            }
+            facets.add(dsfType);
+        }
+        return facets;
+    }
+
     @DELETE
     @AuthRequired
     @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}")
@@ -922,16 +978,12 @@ public Response deleteFeaturedCollections(@Context ContainerRequestContext crc,
      * (judging by the UI). This triggers a 500 when '-d @foo.json' is used.
      */
     public Response setFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String facetIds) {
-
-        List<DatasetFieldType> facets = new LinkedList<>();
-        for (JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class)) {
-            DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
-            if (dsfType == null) {
-                return error(Response.Status.BAD_REQUEST, "Can't find dataset field type '" + facetId + "'");
-            } else if (!dsfType.isFacetable()) {
-                return error(Response.Status.BAD_REQUEST, "Dataset field type '" + facetId + "' is not facetable");
-            }
-            facets.add(dsfType);
+        JsonArray jsonArray = Util.asJsonArray(facetIds);
+        List<DatasetFieldType> facets;
+        try {
+            facets = parseFacets(jsonArray);
+        } catch (WrappedResponse e) {
+            return e.getResponse();
         }
 
         try {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
index 3efefe90681..489b36e7cef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
@@ -1,17 +1,11 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
-import edu.harvard.iq.dataverse.DatasetFieldType;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
+import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
-import edu.harvard.iq.dataverse.RoleAssignment;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.groups.Group;
-import edu.harvard.iq.dataverse.authorization.groups.GroupProvider;
-import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -19,15 +13,12 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import java.io.IOException;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
-import java.util.logging.Logger;
 
-import org.apache.solr.client.solrj.SolrServerException;
 
 /**
  * TODO make override the date and user more active, so prevent code errors.
@@ -38,14 +29,23 @@
 @RequiredPermissions(Permission.AddDataverse)
 public class CreateDataverseCommand extends AbstractCommand<Dataverse> {
 
-    private static final Logger logger = Logger.getLogger(CreateDataverseCommand.class.getName());
-
     private final Dataverse created;
     private final List<DataverseFieldTypeInputLevel> inputLevelList;
     private final List<DatasetFieldType> facetList;
+    private final List<MetadataBlock> metadataBlocks;
+
+    public CreateDataverseCommand(Dataverse created,
+                                  DataverseRequest aRequest,
+                                  List<DatasetFieldType> facetList,
+                                  List<DataverseFieldTypeInputLevel> inputLevelList) {
+        this(created, aRequest, facetList, inputLevelList, null);
+    }
 
-    public CreateDataverseCommand(Dataverse created, DataverseRequest aRequest, List<DatasetFieldType> facetList,
-                                  List<DataverseFieldTypeInputLevel> inputLevelList) {
+    public CreateDataverseCommand(Dataverse created,
+                                  DataverseRequest aRequest,
+                                  List<DatasetFieldType> facetList,
+                                  List<DataverseFieldTypeInputLevel> inputLevelList,
+                                  List<MetadataBlock> metadataBlocks) {
         super(aRequest, created.getOwner());
         this.created = created;
         if (facetList != null) {
@@ -58,6 +58,11 @@ public CreateDataverseCommand(Dataverse created, DataverseRequest aRequest, List
         } else {
             this.inputLevelList = null;
         }
+        if (metadataBlocks != null) {
+            this.metadataBlocks = new ArrayList<>(metadataBlocks);
+        } else {
+            this.metadataBlocks = null;
+        }
     }
 
     @Override
@@ -70,6 +75,11 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
             }
         }
 
+        if (metadataBlocks != null && !metadataBlocks.isEmpty()) {
+            created.setMetadataBlockRoot(true);
+            created.setMetadataBlocks(metadataBlocks);
+        }
+
         if (created.getCreateDate() == null) {
             created.setCreateDate(new Timestamp(new Date().getTime()));
         }
@@ -97,8 +107,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         if (ctxt.dataverses().findByAlias(created.getAlias()) != null) {
             throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this);
         }
-        
-        if(created.getFilePIDsEnabled()!=null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) {
+
+        if (created.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) {
             throw new IllegalCommandException("File PIDs cannot be enabled per collection", this);
         }
 
@@ -109,7 +119,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN);
         String privateUrlToken = null;
 
-        ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken),false);
+        ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken), false);
         // Add additional role assignments if inheritance is set
         boolean inheritAllRoles = false;
         String rolesString = ctxt.settings().getValueForKey(SettingsServiceBean.Key.InheritParentRoleAssignments, "");
@@ -129,18 +139,18 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
                 // above...
                 if ((inheritAllRoles || rolesToInherit.contains(role.getRole().getAlias()))
                         && !(role.getAssigneeIdentifier().equals(getRequest().getUser().getIdentifier())
-                        && role.getRole().equals(adminRole))) {
+                                && role.getRole().equals(adminRole))) {
                     String identifier = role.getAssigneeIdentifier();
                     if (identifier.startsWith(AuthenticatedUser.IDENTIFIER_PREFIX)) {
                         identifier = identifier.substring(AuthenticatedUser.IDENTIFIER_PREFIX.length());
                         ctxt.roles().save(new RoleAssignment(role.getRole(),
-                                ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken),false);
+                                ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken), false);
                     } else if (identifier.startsWith(Group.IDENTIFIER_PREFIX)) {
                         identifier = identifier.substring(Group.IDENTIFIER_PREFIX.length());
                         Group roleGroup = ctxt.groups().getGroup(identifier);
                         if (roleGroup != null) {
                             ctxt.roles().save(new RoleAssignment(role.getRole(),
-                                    roleGroup, managedDv, privateUrlToken),false);
+                                    roleGroup, managedDv, privateUrlToken), false);
                         }
                     }
                 }
@@ -150,12 +160,14 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         }
 
         managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime()));
-        // TODO: save is called here and above; we likely don't need both
-        managedDv = ctxt.dataverses().save(managedDv);
-        // ctxt.index().indexDataverse(managedDv);
 
         if (facetList != null) {
             ctxt.facets().deleteFacetsFor(managedDv);
+
+            if (!facetList.isEmpty()) {
+                managedDv.setFacetRoot(true);
+            }
+
             int i = 0;
             for (DatasetFieldType df : facetList) {
                 ctxt.facets().create(i++, df, managedDv);
@@ -163,17 +175,23 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         }
 
         if (inputLevelList != null) {
+            if (!inputLevelList.isEmpty()) {
+                managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList);
+            }
             ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv);
-            for (DataverseFieldTypeInputLevel obj : inputLevelList) {
-                obj.setDataverse(managedDv);
-                ctxt.fieldTypeInputLevels().create(obj);
+            for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) {
+                inputLevel.setDataverse(managedDv);
+                ctxt.fieldTypeInputLevels().create(inputLevel);
             }
         }
+
+        // TODO: save is called here and above; we likely don't need both
+        managedDv = ctxt.dataverses().save(managedDv);
         return managedDv;
     }
-    
+
     @Override
-    public boolean onSuccess(CommandContext ctxt, Object r) {
+    public boolean onSuccess(CommandContext ctxt, Object r) {
         return ctxt.dataverses().index((Dataverse) r);
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java
new file mode 100644
index 00000000000..1727ac9698f
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java
@@ -0,0 +1,39 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * List the field type input levels {@link DataverseFieldTypeInputLevel} of a {@link Dataverse}.
+ */
+public class ListDataverseInputLevelsCommand extends AbstractCommand<List<DataverseFieldTypeInputLevel>> {
+
+    private final Dataverse dataverse;
+
+    public ListDataverseInputLevelsCommand(DataverseRequest request, Dataverse dataverse) {
+        super(request, dataverse);
+        this.dataverse = dataverse;
+    }
+
+    @Override
+    public List<DataverseFieldTypeInputLevel> execute(CommandContext ctxt) throws CommandException {
+        return dataverse.getDataverseFieldTypeInputLevels();
+    }
+
+    @Override
+    public Map<String, Set<Permission>> getRequiredPermissions() {
+        return Collections.singletonMap("",
+                dataverse.isReleased() ? Collections.<Permission>emptySet()
+                        : Collections.singleton(Permission.ViewUnpublishedDataverse));
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java
index cbab378ccac..36bd1ef4981 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java
@@ -7,6 +7,7 @@
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -14,27 +15,34 @@
 
 /**
  * List the search facets {@link DataverseFacet} of a {@link Dataverse}.
+ *
  * @author michaelsuo
 */
 // no annotations here, since permissions are dynamically decided
 public class ListFacetsCommand extends AbstractCommand<List<DataverseFacet>> {
 
-    private final Dataverse dv;
+    private final Dataverse dataverse;
+    private final boolean rootFacets;
+
+    public ListFacetsCommand(DataverseRequest request, Dataverse dataverse) {
+        this(request, dataverse, true);
+    }
 
-    public ListFacetsCommand(DataverseRequest aRequest, Dataverse aDataverse) {
-        super(aRequest, aDataverse);
-        dv = aDataverse;
+    public ListFacetsCommand(DataverseRequest request, Dataverse dataverse, boolean rootFacets) {
+        super(request, dataverse);
+        this.dataverse = dataverse;
+        this.rootFacets = rootFacets;
     }
 
     @Override
     public List<DataverseFacet> execute(CommandContext ctxt) throws CommandException {
-        return dv.getDataverseFacets();
+        return dataverse.getDataverseFacets(!rootFacets);
     }
 
     @Override
     public Map<String, Set<Permission>> getRequiredPermissions() {
         return Collections.singletonMap("",
-                dv.isReleased() ? Collections.<Permission>emptySet()
-                        : Collections.singleton(Permission.ViewUnpublishedDataverse));
+                dataverse.isReleased() ? Collections.<Permission>emptySet()
+                        : Collections.singleton(Permission.ViewUnpublishedDataverse));
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
index cf7b4a6f69c..b9b08992919 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
@@ -2,7 +2,6 @@
 
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
-import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -29,23 +28,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         if (inputLevelList == null || inputLevelList.isEmpty()) {
             throw new CommandException("Error while updating dataverse input levels: Input level list cannot be null or empty", this);
         }
-        addInputLevelMetadataBlocks();
+        dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList);
         dataverse.setMetadataBlockRoot(true);
         return ctxt.engine().submit(new UpdateDataverseCommand(dataverse, null, null, getRequest(), inputLevelList));
     }
-
-    private void addInputLevelMetadataBlocks() {
-        List<MetadataBlock> dataverseMetadataBlocks = dataverse.getMetadataBlocks();
-        for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) {
-            MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock();
-            if (!dataverseHasMetadataBlock(dataverseMetadataBlocks, inputLevelMetadataBlock)) {
-                dataverseMetadataBlocks.add(inputLevelMetadataBlock);
-            }
-        }
-        dataverse.setMetadataBlocks(dataverseMetadataBlocks);
-    }
-
-    private boolean dataverseHasMetadataBlock(List<MetadataBlock> dataverseMetadataBlocks, MetadataBlock metadataBlock) {
-        return dataverseMetadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId()));
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index c72dfc1d127..c908a4d2bce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -1384,4 +1384,20 @@ public static JsonArrayBuilder jsonDataverseFieldTypeInputLevels(List<DataverseFieldTypeInputLevel> inputLevels) {
+
+    public static JsonArrayBuilder jsonDataverseInputLevels(List<DataverseFieldTypeInputLevel> inputLevels) {
+        JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder();
+        for (DataverseFieldTypeInputLevel inputLevel : inputLevels) {
+            inputLevelsArrayBuilder.add(jsonDataverseInputLevel(inputLevel));
+        }
+        return inputLevelsArrayBuilder;
+    }
+
+    private static JsonObjectBuilder jsonDataverseInputLevel(DataverseFieldTypeInputLevel inputLevel) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("datasetFieldTypeName", inputLevel.getDatasetFieldType().getName());
+        jsonObjectBuilder.add("required", inputLevel.isRequired());
+        jsonObjectBuilder.add("include", inputLevel.isInclude());
+        return jsonObjectBuilder;
+    }
 }
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index b645276ceaf..4b366522966 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -947,7 +947,13 @@ dataverse.default=(Default)
 dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation
 dataverse.guestbookentry.atdownload=Guestbook Entry At Download
 dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request
-dataverse.updateinputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0}
+dataverse.inputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0}
+dataverse.inputlevels.error.cannotberequiredifnotincluded=The input level for the dataset field type {0} cannot be required if it is not included
+dataverse.facets.error.fieldtypenotfound=Can''t find dataset field type ''{0}''
+dataverse.facets.error.fieldtypenotfacetable=Dataset field type ''{0}'' is not facetable
+dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0}
+dataverse.create.error.jsonparse=Error parsing JSON: {0}
+dataverse.create.error.jsonparsetodataverse=Error parsing the POSTed JSON into a dataverse: {0}
 # rolesAndPermissionsFragment.xhtml
 
 # advanced.xhtml
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 79cc46cfa79..09b60e46e7e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -941,6 +941,13 @@ public void testUpdateInputLevels() {
                 .body("message", equalTo("Invalid dataset field type name: invalid1"))
                 .statusCode(BAD_REQUEST.getStatusCode());
 
+        // Update input levels with an invalid configuration (field required but not included)
+        testIncludedInputLevels = new boolean[]{false, false};
+        updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken);
+        updateDataverseInputLevelsResponse.then().assertThat()
+                .body("message", equalTo(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded", List.of("geographicCoverage"))))
+                .statusCode(BAD_REQUEST.getStatusCode());
+
         // Update invalid empty input levels
         testInputLevelNames = new String[]{};
         updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken);
@@ -948,4 +955,96 @@ public void testUpdateInputLevels() {
                 .body("message", equalTo("Error while updating dataverse input levels: Input level list cannot be null or empty"))
                 .statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
     }
+
+    @Test
+    public void testAddDataverse() {
+        Response createUser = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String testAliasSuffix = "-add-dataverse";
+
+        // Without optional input levels and facet ids
+        String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root");
+        createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken);
+        listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String actualMetadataBlockName = listMetadataBlocksResponse.then().extract().path("data[0].name");
+        assertEquals("citation", actualMetadataBlockName);
+
+        // With optional input levels and facet ids
+        String[] testInputLevelNames = {"geographicCoverage", "country"};
+        String[] testFacetIds = {"authorName", "authorAffiliation"};
"authorAffiliation"}; + String[] testMetadataBlockNames = {"citation", "geospatial"}; + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds, testMetadataBlockNames); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Assert facets are configured + Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(testDataverseAlias, apiToken); + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]"); + assertNotEquals(actualFacetName1, actualFacetName2); + assertThat(testFacetIds, hasItemInArray(actualFacetName1)); + assertThat(testFacetIds, hasItemInArray(actualFacetName2)); + + // Assert input levels are configured + Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(testDataverseAlias, apiToken); + String actualInputLevelName1 = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); + String actualInputLevelName2 = listDataverseInputLevelsResponse.then().extract().path("data[1].datasetFieldTypeName"); + assertNotEquals(actualFacetName1, actualFacetName2); + assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName1)); + assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName2)); + + // Assert metadata blocks are configured + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken); + listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + String actualMetadataBlockName1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + String actualMetadataBlockName2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + assertNotEquals(actualMetadataBlockName1, actualMetadataBlockName2); + assertThat(testMetadataBlockNames, hasItemInArray(actualMetadataBlockName1)); + assertThat(testMetadataBlockNames, hasItemInArray(actualMetadataBlockName2)); + + // Setting metadata blocks without citation + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + String[] testMetadataBlockNamesWithoutCitation = {"geospatial"}; + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", null, null, testMetadataBlockNamesWithoutCitation); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Assert metadata blocks including citation are configured + String[] testExpectedBlockNames = {"citation", "geospatial"}; + actualMetadataBlockName1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + actualMetadataBlockName2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + assertNotEquals(actualMetadataBlockName1, actualMetadataBlockName2); + assertThat(testExpectedBlockNames, hasItemInArray(actualMetadataBlockName1)); + assertThat(testExpectedBlockNames, hasItemInArray(actualMetadataBlockName2)); + + // Should return error when an invalid facet id is sent + String invalidFacetId = "invalidFacetId"; + String[] testInvalidFacetIds = {"authorName", invalidFacetId}; + testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testMetadataBlockNames); + 
+        createSubDataverseResponse.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Can't find dataset field type '" + invalidFacetId + "'"));
+
+        // Should return an error when an invalid input level is sent
+        String invalidInputLevelName = "wrongInputLevel";
+        String[] testInvalidInputLevelNames = {"geographicCoverage", invalidInputLevelName};
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInvalidInputLevelNames, testFacetIds, testMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Invalid dataset field type name: " + invalidInputLevelName));
+
+        // Should return an error when an invalid metadata block name is sent
+        String invalidMetadataBlockName = "invalidMetadataBlockName";
+        String[] testInvalidMetadataBlockNames = {"citation", invalidMetadataBlockName};
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds, testInvalidMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Invalid metadata block name: " + invalidMetadataBlockName));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 0216859b869..917154c80cc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -358,20 +358,57 @@ public static Response getServiceDocument(String apiToken) {
     static Response createDataverse(String alias, String category, String apiToken) {
         return createSubDataverse(alias, category, apiToken, ":root");
     }
-    
+
     static Response createSubDataverse(String alias, String category, String apiToken, String parentDV) {
+        return createSubDataverse(alias, category, apiToken, parentDV, null, null, null);
+    }
+
+    static Response createSubDataverse(String alias, String category, String apiToken, String parentDV, String[] inputLevelNames, String[] facetIds, String[] metadataBlockNames) {
         JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder();
         contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", getEmailFromUserName(getRandomIdentifier())));
         JsonArrayBuilder subjectArrayBuilder = Json.createArrayBuilder();
         subjectArrayBuilder.add("Other");
-        JsonObject dvData = Json.createObjectBuilder()
+        JsonObjectBuilder objectBuilder = Json.createObjectBuilder()
                 .add("alias", alias)
                 .add("name", alias)
                 .add("dataverseContacts", contactArrayBuilder)
                 .add("dataverseSubjects", subjectArrayBuilder)
                 // don't send "dataverseType" if category is null, must be a better way
-                .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? category : "whatever")
-                .build();
+                .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? category : "whatever");
category : "whatever"); + + JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); + + if (inputLevelNames != null) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for(String inputLevelName : inputLevelNames) { + inputLevelsArrayBuilder.add(Json.createObjectBuilder() + .add("datasetFieldTypeName", inputLevelName) + .add("required", true) + .add("include", true) + ); + } + metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); + } + + if (metadataBlockNames != null) { + JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); + for(String metadataBlockName : metadataBlockNames) { + metadataBlockNamesArrayBuilder.add(metadataBlockName); + } + metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); + } + + objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + + if (facetIds != null) { + JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); + for(String facetId : facetIds) { + facetIdsArrayBuilder.add(facetId); + } + objectBuilder.add("facetIds", facetIdsArrayBuilder); + } + + JsonObject dvData = objectBuilder.build(); Response createDataverseResponse = given() .body(dvData.toString()).contentType(ContentType.JSON) .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); @@ -3986,4 +4023,18 @@ public static Response getOpenAPI(String accept, String format) { .get("/openapi"); return response; } + + static Response listDataverseFacets(String dataverseAlias, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/dataverses/" + dataverseAlias + "/facets"); + } + + static Response listDataverseInputLevels(String dataverseAlias, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/dataverses/" + dataverseAlias + "/inputLevels"); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java index 927d288d660..1b66ca56b23 100644 --- a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java +++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java @@ -196,6 +196,7 @@ public static DatasetFieldType makeDatasetFieldType() { DatasetFieldType retVal = new DatasetFieldType("SampleType-"+id, FieldType.TEXT, false); retVal.setId(id); MetadataBlock mdb = new MetadataBlock(); + mdb.setId(new Random().nextLong()); mdb.setName("Test"); retVal.setMetadataBlock(mdb); return retVal;