diff --git a/.env b/.env index 3467f8df73b..e3ececc2e54 100644 --- a/.env +++ b/.env @@ -1,4 +1,4 @@ APP_IMAGE=gdcc/dataverse:unstable POSTGRES_VERSION=13 DATAVERSE_DB_USER=dataverse -SOLR_VERSION=8.11.1 +SOLR_VERSION=9.3.0 diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml index 9e514690a13..c86d284e74b 100644 --- a/.github/workflows/container_app_pr.yml +++ b/.github/workflows/container_app_pr.yml @@ -25,7 +25,7 @@ jobs: ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge' - uses: actions/setup-java@v3 with: - java-version: "11" + java-version: "17" distribution: 'adopt' - uses: actions/cache@v3 with: diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml index c60691b1c85..b3e247e376c 100644 --- a/.github/workflows/container_app_push.yml +++ b/.github/workflows/container_app_push.yml @@ -36,10 +36,10 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK uses: actions/setup-java@v3 with: - java-version: "11" + java-version: "17" distribution: temurin cache: maven @@ -99,17 +99,21 @@ jobs: name: "Package & Publish" runs-on: ubuntu-latest # Only run this job if we have access to secrets. This is true for events like push/schedule which run in - # context of main repo, but for PRs only true if coming from the main repo! Forks have no secret access. - if: needs.check-secrets.outputs.available == 'true' + # context of the main repo, but for PRs only true if coming from the main repo! Forks have no secret access. + # + # Note: The team's decision was to not auto-deploy an image on any git push where no PR exists (yet). + # Accordingly, only run for push events on branches develop and master. + if: needs.check-secrets.outputs.available == 'true' && + ( github.event_name != 'push' || ( github.event_name == 'push' && contains(fromJSON('["develop", "master"]'), github.ref_name))) steps: - uses: actions/checkout@v3 - uses: actions/setup-java@v3 with: - java-version: "11" + java-version: "17" distribution: temurin # Depending on context, we push to different targets. Login accordingly. - - if: ${{ github.event_name != 'pull_request' }} + - if: github.event_name != 'pull_request' name: Log in to Docker Hub registry uses: docker/login-action@v2 with: diff --git a/.github/workflows/container_base_push.yml b/.github/workflows/container_base_push.yml index 5c62fb0c811..b938851f816 100644 --- a/.github/workflows/container_base_push.yml +++ b/.github/workflows/container_base_push.yml @@ -34,7 +34,7 @@ jobs: packages: read strategy: matrix: - jdk: [ '11' ] + jdk: [ '17' ] # Only run in upstream repo - avoid unnecessary runs in forks if: ${{ github.repository_owner == 'IQSS' }} diff --git a/.github/workflows/cypress_ui.yml.future b/.github/workflows/cypress_ui.yml.future index b38ae2f9558..0823233fdeb 100644 --- a/.github/workflows/cypress_ui.yml.future +++ b/.github/workflows/cypress_ui.yml.future @@ -2,6 +2,7 @@ # # THIS IS AN OLD TRAVIS-CI.ORG JOB FILE # To be used with Github Actions, it would be necessary to refactor it. +# In addition, it needs to be rewritten to use our modern containers. # Keeping it as the future example it has been before. 
# See also #5846 # @@ -30,8 +31,6 @@ jobs: directories: # we also need to cache folder with Cypress binary - ~/.cache - # we want to cache the Glassfish and Solr dependencies as well - - conf/docker-aio/dv/deps before_install: - cd tests install: diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml new file mode 100644 index 00000000000..2443ef8b2e0 --- /dev/null +++ b/.github/workflows/deploy_beta_testing.yml @@ -0,0 +1,82 @@ +name: 'Deploy to Beta Testing' + +on: + push: + branches: + - develop + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-java@v3 + with: + distribution: 'zulu' + java-version: '17' + + - name: Enable API Session Auth feature flag + working-directory: src/main/resources/META-INF + run: echo -e "dataverse.feature.api-session-auth=true" >> microprofile-config.properties + + - name: Build application war + run: mvn package + + - name: Get war file name + working-directory: target + run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV + + - name: Upload war artifact + uses: actions/upload-artifact@v3 + with: + name: built-app + path: ./target/${{ env.war_file }} + + deploy-to-payara: + needs: build + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Download war artifact + uses: actions/download-artifact@v3 + with: + name: built-app + path: ./ + + - name: Get war file name + run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV + + - name: Copy war file to remote instance + uses: appleboy/scp-action@master + with: + host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }} + username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }} + key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }} + source: './${{ env.war_file }}' + target: '/home/${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}' + overwrite: true + + - name: Execute payara war deployment remotely + uses: appleboy/ssh-action@v1.0.0 + env: + INPUT_WAR_FILE: ${{ env.war_file }} + with: + host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }} + username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }} + key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }} + envs: INPUT_WAR_FILE + script: | + APPLICATION_NAME=dataverse-backend + ASADMIN='/usr/local/payara6/bin/asadmin --user admin' + $ASADMIN undeploy $APPLICATION_NAME + $ASADMIN stop-domain + rm -rf /usr/local/payara6/glassfish/domains/domain1/generated + rm -rf /usr/local/payara6/glassfish/domains/domain1/osgi-cache + $ASADMIN start-domain + $ASADMIN deploy --name $APPLICATION_NAME $INPUT_WAR_FILE + $ASADMIN stop-domain + $ASADMIN start-domain diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 45beabf3193..efa3fa4a471 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -22,18 +22,9 @@ jobs: strategy: fail-fast: false matrix: - jdk: [ '11' ] + jdk: [ '17' ] experimental: [false] status: ["Stable"] - # - # JDK 17 builds disabled due to non-essential fails marking CI jobs as completely failed within - # Github Projects, PR lists etc. This was consensus on Slack #dv-tech. See issue #8094 - # (This is a limitation of how Github is currently handling these things.) 
- # - #include: - # - jdk: '17' - # experimental: true - # status: "Experimental" continue-on-error: ${{ matrix.experimental }} runs-on: ubuntu-latest steps: @@ -68,6 +59,14 @@ jobs: # We don't want to cache the WAR file, so delete it - run: rm -rf ~/.m2/repository/edu/harvard/iq/dataverse + + # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub. + # See also https://github.com/actions/upload-artifact#zipped-artifact-downloads + - uses: actions/upload-artifact@v3 + with: + name: dataverse-java${{ matrix.jdk }}.war + path: target/dataverse*.war + retention-days: 7 push-app-img: name: Publish App Image permissions: diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index 94ba041e135..56f7d648dc4 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -33,7 +33,6 @@ jobs: # Exclude old scripts exclude: | */.git/* - conf/docker-aio/* doc/* downloads/* scripts/database/* @@ -43,5 +42,4 @@ jobs: scripts/issues/* scripts/r/* scripts/tests/* - scripts/vagrant/* tests/* diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml index 5c251cfc897..227a74fa00f 100644 --- a/.github/workflows/shellspec.yml +++ b/.github/workflows/shellspec.yml @@ -60,7 +60,7 @@ jobs: shellspec shellspec-macos: name: "MacOS" - runs-on: macos-10.15 + runs-on: macos-latest steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml index 1fbf05ce693..8ad74b3e4bb 100644 --- a/.github/workflows/spi_release.yml +++ b/.github/workflows/spi_release.yml @@ -2,12 +2,12 @@ name: Dataverse SPI on: push: - branch: + branches: - "develop" paths: - "modules/dataverse-spi/**" pull_request: - branch: + branches: - "develop" paths: - "modules/dataverse-spi/**" @@ -40,7 +40,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-java@v3 with: - java-version: '11' + java-version: '17' distribution: 'adopt' server-id: ossrh server-username: MAVEN_USERNAME @@ -66,7 +66,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-java@v3 with: - java-version: '11' + java-version: '17' distribution: 'adopt' - uses: actions/cache@v2 with: @@ -78,7 +78,7 @@ jobs: - name: Set up Maven Central Repository uses: actions/setup-java@v3 with: - java-version: '11' + java-version: '17' distribution: 'adopt' server-id: ossrh server-username: MAVEN_USERNAME @@ -91,4 +91,4 @@ jobs: env: MAVEN_USERNAME: ${{ secrets.DATAVERSEBOT_SONATYPE_USERNAME }} MAVEN_PASSWORD: ${{ secrets.DATAVERSEBOT_SONATYPE_TOKEN }} - MAVEN_GPG_PASSPHRASE: ${{ secrets.DATAVERSEBOT_GPG_PASSWORD }} \ No newline at end of file + MAVEN_GPG_PASSPHRASE: ${{ secrets.DATAVERSEBOT_GPG_PASSWORD }} diff --git a/.gitignore b/.gitignore index d38538fc364..7f0d3a2b466 100644 --- a/.gitignore +++ b/.gitignore @@ -18,7 +18,6 @@ GRTAGS .Trashes ehthumbs.db Thumbs.db -.vagrant *.pyc *.swp scripts/api/py_api_wrapper/demo-data/* @@ -39,17 +38,6 @@ scripts/api/setup-all.*.log # ctags generated tag file tags -# dependencies I'm not sure we're allowed to redistribute / have in version control -conf/docker-aio/dv/deps/ - -# no need to check aoi installer zip into vc -conf/docker-aio/dv/install/dvinstall.zip -# or copy of test data -conf/docker-aio/testdata/ - -# docker-aio creates maven/ which reports 86 new files. ignore this wd. 
-maven/ - scripts/installer/default.config *.pem @@ -71,8 +59,5 @@ scripts/search/data/binary/trees.png.thumb140 src/main/webapp/resources/images/cc0.png.thumb140 src/main/webapp/resources/images/dataverseproject.png.thumb140 -# apache-maven is downloaded by docker-aio -apache-maven* - # Docker development volumes /docker-dev-volumes diff --git a/Vagrantfile b/Vagrantfile deleted file mode 100644 index 8293fbaf5fc..00000000000 --- a/Vagrantfile +++ /dev/null @@ -1,27 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : - -VAGRANTFILE_API_VERSION = "2" - -Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| - config.vm.box = "bento/rockylinux-8.4" - - config.vm.provider "virtualbox" do |vbox| - vbox.cpus = 4 - vbox.memory = 4096 - end - - config.vm.provision "shell", path: "scripts/vagrant/setup.sh" - config.vm.provision "shell", path: "scripts/vagrant/setup-solr.sh" - config.vm.provision "shell", path: "scripts/vagrant/install-dataverse.sh" - - config.vm.network "private_network", type: "dhcp" - config.vm.network "forwarded_port", guest: 80, host: 8888 - config.vm.network "forwarded_port", guest: 443, host: 9999 - config.vm.network "forwarded_port", guest: 8983, host: 8993 - config.vm.network "forwarded_port", guest: 8080, host: 8088 - config.vm.network "forwarded_port", guest: 8181, host: 8188 - - config.vm.synced_folder ".", "/dataverse" - -end diff --git a/conf/docker-aio/0prep_deps.sh b/conf/docker-aio/0prep_deps.sh deleted file mode 100755 index 13a91705303..00000000000 --- a/conf/docker-aio/0prep_deps.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh -if [ ! -d dv/deps ]; then - mkdir -p dv/deps -fi -wdir=`pwd` - -if [ ! -e dv/deps/payara-5.2022.3.zip ]; then - echo "payara dependency prep" - wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip -O dv/deps/payara-5.2022.3.zip -fi - -if [ ! -e dv/deps/solr-8.11.1dv.tgz ]; then - echo "solr dependency prep" - # schema changes *should* be the only ones... - cd dv/deps/ - wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz -O solr-8.11.1dv.tgz - cd ../../ -fi - diff --git a/conf/docker-aio/1prep.sh b/conf/docker-aio/1prep.sh deleted file mode 100755 index 508d41d93ff..00000000000 --- a/conf/docker-aio/1prep.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -# move things necessary for integration tests into build context. 
-# this was based off the phoenix deployment; and is likely uglier and bulkier than necessary in a perfect world - -mkdir -p testdata/doc/sphinx-guides/source/_static/util/ -cp ../solr/8.11.1/schema*.xml testdata/ -cp ../solr/8.11.1/solrconfig.xml testdata/ -cp ../jhove/jhove.conf testdata/ -cp ../jhove/jhoveConfig.xsd testdata/ -cd ../../ -cp -r scripts conf/docker-aio/testdata/ -cp doc/sphinx-guides/source/_static/util/createsequence.sql conf/docker-aio/testdata/doc/sphinx-guides/source/_static/util/ - -wget -q https://downloads.apache.org/maven/maven-3/3.8.5/binaries/apache-maven-3.8.5-bin.tar.gz -tar xfz apache-maven-3.8.5-bin.tar.gz -mkdir maven -mv apache-maven-3.8.5/* maven/ -echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > maven/maven.sh -echo "export M2_HOME=../maven" >> maven/maven.sh -echo "export MAVEN_HOME=../maven" >> maven/maven.sh -echo "export PATH=../maven/bin:${PATH}" >> maven/maven.sh -chmod 0755 maven/maven.sh - -# not using dvinstall.zip for setupIT.bash; but still used in install.bash for normal ops -source maven/maven.sh && mvn clean -./scripts/installer/custom-build-number -source maven/maven.sh && mvn package -cd scripts/installer -make clean -make -mkdir -p ../../conf/docker-aio/dv/install -cp dvinstall.zip ../../conf/docker-aio/dv/install/ - -# ITs sometimes need files server-side -# yes, these copies could be avoided by moving the build root here. but the build -# context is already big enough that it seems worth avoiding. -cd ../../ -cp src/test/java/edu/harvard/iq/dataverse/makedatacount/sushi_sample_logs.json conf/docker-aio/testdata/ diff --git a/conf/docker-aio/c8.dockerfile b/conf/docker-aio/c8.dockerfile deleted file mode 100644 index 0002464cbf2..00000000000 --- a/conf/docker-aio/c8.dockerfile +++ /dev/null @@ -1,87 +0,0 @@ -FROM rockylinux/rockylinux:latest -# OS dependencies -# IQSS now recommends Postgres 13. -RUN dnf -qy module disable postgresql -RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm - -RUN echo "fastestmirror=true" >> /etc/dnf/dnf.conf -RUN yum install -y java-11-openjdk-devel postgresql13-server sudo epel-release unzip curl httpd python2 diffutils -RUN yum install -y jq lsof awscli - -# for older search scripts -RUN ln -s /usr/bin/python2 /usr/bin/python - -# copy and unpack dependencies (solr, payara) -COPY dv /tmp/dv -COPY testdata/schema*.xml /tmp/dv/ -COPY testdata/solrconfig.xml /tmp/dv - -# ITs need files -COPY testdata/sushi_sample_logs.json /tmp/ - -# IPv6 and localhost appears to be related to some of the intermittant connection issues -COPY disableipv6.conf /etc/sysctl.d/ -RUN rm /etc/httpd/conf/* -COPY httpd.conf /etc/httpd/conf -RUN cd /opt ; tar zxf /tmp/dv/deps/solr-8.11.1dv.tgz -RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2022.3.zip ; ln -s /opt/payara5 /opt/glassfish4 - -# this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitir endpoint for the jvm` -# this dies under Java 11, do we keep it? 
-#COPY domain-restmonitor.xml /opt/payara5/glassfish/domains/domain1/config/domain.xml - -RUN sudo -u postgres /usr/pgsql-13/bin/initdb -D /var/lib/pgsql/13/data -E 'UTF-8' - -# copy configuration related files -RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/13/data/ -RUN cp -r /opt/solr-8.11.1/server/solr/configsets/_default /opt/solr-8.11.1/server/solr/collection1 -RUN cp /tmp/dv/schema*.xml /opt/solr-8.11.1/server/solr/collection1/conf/ -RUN cp /tmp/dv/solrconfig.xml /opt/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml - -# skipping payara user and solr user (run both as root) - -#solr port -EXPOSE 8983 - -# postgres port -EXPOSE 5432 - -# payara port -EXPOSE 8080 - -# apache port, http -EXPOSE 80 - -# debugger ports (jmx,jdb) -EXPOSE 8686 -EXPOSE 9009 - -RUN mkdir /opt/dv - -# keeping the symlink on the off chance that something else is still assuming /usr/local/glassfish4 -RUN ln -s /opt/payara5 /usr/local/glassfish4 -COPY dv/install/ /opt/dv/ -COPY install.bash /opt/dv/ -COPY entrypoint.bash /opt/dv/ -COPY testdata /opt/dv/testdata -COPY testscripts/* /opt/dv/testdata/ -COPY setupIT.bash /opt/dv -WORKDIR /opt/dv - -# need to take DOI provider info from build args as of ec377d2a4e27424db8815c55ce544deee48fc5e0 -# Default to EZID; use built-args to switch to DataCite (or potentially handles) -#ARG DoiProvider=EZID -ARG DoiProvider=FAKE -ARG doi_baseurl=https://ezid.cdlib.org -ARG doi_username=apitest -ARG doi_password=apitest -ENV DoiProvider=${DoiProvider} -ENV doi_baseurl=${doi_baseurl} -ENV doi_username=${doi_username} -ENV doi_password=${doi_password} -COPY configure_doi.bash /opt/dv - -# healthcheck for payara only (assumes modified domain.xml); -# does not check dataverse application status. -HEALTHCHECK CMD curl --fail http://localhost:4848/monitoring/domain/server.json || exit 1 -CMD ["/opt/dv/entrypoint.bash"] diff --git a/conf/docker-aio/configure_doi.bash b/conf/docker-aio/configure_doi.bash deleted file mode 100755 index f0f0bc6d0d4..00000000000 --- a/conf/docker-aio/configure_doi.bash +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -cd /opt/payara5 - -# if appropriate; reconfigure PID provider on the basis of environmental variables. -if [ ! -z "${DoiProvider}" ]; then - curl -X PUT -d ${DoiProvider} http://localhost:8080/api/admin/settings/:DoiProvider -fi -if [ ! -z "${doi_username}" ]; then - bin/asadmin create-jvm-options "-Ddoi.username=${doi_username}" -fi -if [ ! -z "${doi_password}" ]; then - bin/asadmin create-jvm-options "-Ddoi.password=${doi_password}" -fi -if [ ! -z "${doi_baseurl}" ]; then - bin/asadmin delete-jvm-options "-Ddoi.baseurlstring=https\://mds.test.datacite.org" - doi_baseurl_esc=`echo ${doi_baseurl} | sed -e 's/:/\\\:/'` - bin/asadmin create-jvm-options "-Ddoi.baseurlstring=${doi_baseurl_esc}" -fi -if [ ! 
-z "${doi_dataciterestapiurl}" ]; then - bin/asadmin delete-jvm-options "-Ddoi.dataciterestapiurlstring=https\://api.test.datacite.org" - doi_dataciterestapiurl_esc=`echo ${doi_dataciterestapiurl} | sed -e 's/:/\\\:/'` - bin/asadmin create-jvm-options "-Ddoi.dataciterestapiurlstring=${doi_dataciterestapiurl_esc}" -fi diff --git a/conf/docker-aio/disableipv6.conf b/conf/docker-aio/disableipv6.conf deleted file mode 100644 index 8d425183e3f..00000000000 --- a/conf/docker-aio/disableipv6.conf +++ /dev/null @@ -1 +0,0 @@ -net.ipv6.conf.all.disable_ipv6 = 1 diff --git a/conf/docker-aio/domain-restmonitor.xml b/conf/docker-aio/domain-restmonitor.xml deleted file mode 100644 index a18a88ab011..00000000000 --- a/conf/docker-aio/domain-restmonitor.xml +++ /dev/null @@ -1,486 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -XX:MaxPermSize=192m - -client - -Djava.awt.headless=true - -Djdk.corba.allowOutputStreamSubclass=true - -Djavax.xml.accessExternalSchema=all - -Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder - -XX:+UnlockDiagnosticVMOptions - -Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed - -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy - -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf - -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as - -Xmx512m - -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks - -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks - -Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext - -Djdbc.drivers=org.apache.derby.jdbc.ClientDriver - -DANTLR_USE_DIRECT_CLASS_LOADING=true - -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory - - -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.shell.remote,org.apache.felix.fileinstall - - - -Dosgi.shell.telnet.port=6666 - - -Dosgi.shell.telnet.maxconn=1 - - -Dosgi.shell.telnet.ip=127.0.0.1 - - -Dgosh.args=--nointeractive - - -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ - - -Dfelix.fileinstall.poll=5000 - - -Dfelix.fileinstall.log.level=2 - - -Dfelix.fileinstall.bundles.new.start=true - - -Dfelix.fileinstall.bundles.startTransient=true - - -Dfelix.fileinstall.disableConfigSave=false - - -XX:NewRatio=2 - - -Dcom.ctc.wstx.returnNullForDefaultNamespace=true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -XX:MaxPermSize=192m - -server - -Djava.awt.headless=true - -Djdk.corba.allowOutputStreamSubclass=true - -XX:+UnlockDiagnosticVMOptions - -Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed - -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy - 
-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf - -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as - -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks - -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks - -Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext - -Djdbc.drivers=org.apache.derby.jdbc.ClientDriver - -DANTLR_USE_DIRECT_CLASS_LOADING=true - -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory - -XX:NewRatio=2 - -Xmx512m - - -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall - - -Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT} - - -Dosgi.shell.telnet.maxconn=1 - - -Dosgi.shell.telnet.ip=127.0.0.1 - - -Dgosh.args=--noshutdown -c noop=true - - -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ - - -Dfelix.fileinstall.poll=5000 - - -Dfelix.fileinstall.log.level=3 - - -Dfelix.fileinstall.bundles.new.start=true - - -Dfelix.fileinstall.bundles.startTransient=true - - -Dfelix.fileinstall.disableConfigSave=false - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/conf/docker-aio/dv/install/default.config b/conf/docker-aio/dv/install/default.config deleted file mode 100644 index 0b806a8714b..00000000000 --- a/conf/docker-aio/dv/install/default.config +++ /dev/null @@ -1,15 +0,0 @@ -HOST_DNS_ADDRESS localhost -GLASSFISH_DIRECTORY /opt/glassfish4 -ADMIN_EMAIL -MAIL_SERVER mail.hmdc.harvard.edu -POSTGRES_ADMIN_PASSWORD secret -POSTGRES_SERVER db -POSTGRES_PORT 5432 -POSTGRES_DATABASE dvndb -POSTGRES_USER dvnapp -POSTGRES_PASSWORD secret -SOLR_LOCATION idx -RSERVE_HOST localhost -RSERVE_PORT 6311 -RSERVE_USER rserve -RSERVE_PASSWORD rserve diff --git a/conf/docker-aio/dv/pg_hba.conf b/conf/docker-aio/dv/pg_hba.conf deleted file mode 100644 index 77feba5247d..00000000000 --- a/conf/docker-aio/dv/pg_hba.conf +++ /dev/null @@ -1,91 +0,0 @@ -# PostgreSQL Client Authentication Configuration File -# =================================================== -# -# Refer to the "Client Authentication" section in the PostgreSQL -# documentation for a complete description of this file. A short -# synopsis follows. -# -# This file controls: which hosts are allowed to connect, how clients -# are authenticated, which PostgreSQL user names they can use, which -# databases they can access. Records take one of these forms: -# -# local DATABASE USER METHOD [OPTIONS] -# host DATABASE USER ADDRESS METHOD [OPTIONS] -# hostssl DATABASE USER ADDRESS METHOD [OPTIONS] -# hostnossl DATABASE USER ADDRESS METHOD [OPTIONS] -# -# (The uppercase items must be replaced by actual values.) -# -# The first field is the connection type: "local" is a Unix-domain -# socket, "host" is either a plain or SSL-encrypted TCP/IP socket, -# "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a -# plain TCP/IP socket. -# -# DATABASE can be "all", "sameuser", "samerole", "replication", a -# database name, or a comma-separated list thereof. The "all" -# keyword does not match "replication". Access to replication -# must be enabled in a separate record (see example below). 
-# -# USER can be "all", a user name, a group name prefixed with "+", or a -# comma-separated list thereof. In both the DATABASE and USER fields -# you can also write a file name prefixed with "@" to include names -# from a separate file. -# -# ADDRESS specifies the set of hosts the record matches. It can be a -# host name, or it is made up of an IP address and a CIDR mask that is -# an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that -# specifies the number of significant bits in the mask. A host name -# that starts with a dot (.) matches a suffix of the actual host name. -# Alternatively, you can write an IP address and netmask in separate -# columns to specify the set of hosts. Instead of a CIDR-address, you -# can write "samehost" to match any of the server's own IP addresses, -# or "samenet" to match any address in any subnet that the server is -# directly connected to. -# -# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi", -# "krb5", "ident", "peer", "pam", "ldap", "radius" or "cert". Note that -# "password" sends passwords in clear text; "md5" is preferred since -# it sends encrypted passwords. -# -# OPTIONS are a set of options for the authentication in the format -# NAME=VALUE. The available options depend on the different -# authentication methods -- refer to the "Client Authentication" -# section in the documentation for a list of which options are -# available for which authentication methods. -# -# Database and user names containing spaces, commas, quotes and other -# special characters must be quoted. Quoting one of the keywords -# "all", "sameuser", "samerole" or "replication" makes the name lose -# its special character, and just match a database or username with -# that name. -# -# This file is read on server startup and when the postmaster receives -# a SIGHUP signal. If you edit the file on a running system, you have -# to SIGHUP the postmaster for the changes to take effect. You can -# use "pg_ctl reload" to do that. - -# Put your actual configuration here -# ---------------------------------- -# -# If you want to allow non-local connections, you need to add more -# "host" records. In that case you will also need to make PostgreSQL -# listen on a non-local interface via the listen_addresses -# configuration parameter, or via the -i or -h command line switches. - - - -# TYPE DATABASE USER ADDRESS METHOD - -# "local" is for Unix domain socket connections only -#local all all peer -local all all trust -# IPv4 local connections: -#host all all 127.0.0.1/32 trust -host all all 0.0.0.0/0 trust -# IPv6 local connections: -host all all ::1/128 trust -# Allow replication connections from localhost, by a user with the -# replication privilege. -#local replication postgres peer -#host replication postgres 127.0.0.1/32 ident -#host replication postgres ::1/128 ident diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash deleted file mode 100755 index 236bb30f67a..00000000000 --- a/conf/docker-aio/entrypoint.bash +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash -export LANG=en_US.UTF-8 -sudo -u postgres /usr/pgsql-13/bin/pg_ctl start -D /var/lib/pgsql/13/data & -cd /opt/solr-8.11.1/ -# TODO: Run Solr as non-root and remove "-force". -bin/solr start -force -bin/solr create_core -c collection1 -d server/solr/collection1/conf -force - -# start apache, in both foreground and background... -apachectl -DFOREGROUND & - -# TODO: Run Payara as non-root. 
-cd /opt/payara5 -bin/asadmin start-domain --debug -sleep infinity - diff --git a/conf/docker-aio/httpd.conf b/conf/docker-aio/httpd.conf deleted file mode 100644 index 85c851d785f..00000000000 --- a/conf/docker-aio/httpd.conf +++ /dev/null @@ -1,27 +0,0 @@ - -Include conf.d/*.conf -Include conf.modules.d/*.conf -ServerName localhost -Listen 80 443 -PidFile run/httpd.pid -DocumentRoot "/var/www/html" -TypesConfig /etc/mime.types -User apache -Group apache - - - ServerName localhost - LogLevel debug - ErrorLog logs/error_log - LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined - CustomLog logs/access_log combined - - # proxy config (aka - what to send to glassfish or not) - ProxyPassMatch ^/Shibboleth.sso ! - ProxyPassMatch ^/shibboleth-ds ! - # pass everything else to Glassfish - ProxyPass / ajp://localhost:8009/ -# glassfish can be slow sometimes - ProxyTimeout 300 - - diff --git a/conf/docker-aio/install.bash b/conf/docker-aio/install.bash deleted file mode 100755 index 2b3275ad830..00000000000 --- a/conf/docker-aio/install.bash +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash -sudo -u postgres createuser --superuser dvnapp -#./entrypoint.bash & -unzip dvinstall.zip -cd dvinstall/ -echo "beginning installer" -./install -admin_email=dvAdmin@mailinator.com -y -f > install.out 2> install.err - -echo "installer complete" -cat install.err diff --git a/conf/docker-aio/prep_it.bash b/conf/docker-aio/prep_it.bash deleted file mode 100755 index adb257e43b1..00000000000 --- a/conf/docker-aio/prep_it.bash +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash - -# run through all the steps to setup docker-aio to run integration tests - -# hard-codes several assumptions: image is named dv0, container is named dv, port is 8084 - -# glassfish healthy/ready retries -n_wait=5 - -cd conf/docker-aio -./0prep_deps.sh -./1prep.sh -docker build -t dv0 -f c8.dockerfile . -# cleanup from previous runs if necessary -docker rm -f dv -# start container -docker run -d -p 8084:80 -p 8083:8080 -p 9010:9009 --name dv dv0 -# wait for glassfish to be healthy -i_wait=0 -d_wait=10 -while [ $i_wait -lt $n_wait ] -do - h=`docker inspect -f "{{.State.Health.Status}}" dv` - if [ "healthy" == "${h}" ]; then - break - else - sleep $d_wait - fi - i_wait=$(( $i_wait + 1 )) - -done -# try setupIT.bash -docker exec dv /opt/dv/setupIT.bash -err=$? -if [ $err -ne 0 ]; then - echo "error - setupIT failure" - exit 1 -fi -# configure DOI provider based on docker build arguments / environmental variables -docker exec dv /opt/dv/configure_doi.bash -err=$? -if [ $err -ne 0 ]; then - echo "error - DOI configuration failure" - exit 1 -fi -# handle config for the private url test (and things like publishing...) -./seturl.bash - - -cd ../.. -#echo "docker-aio ready to run integration tests ($i_retry)" -echo "docker-aio ready to run integration tests" -curl http://localhost:8084/api/info/version -echo $? - diff --git a/conf/docker-aio/readme.md b/conf/docker-aio/readme.md deleted file mode 100644 index f3031a5bb6e..00000000000 --- a/conf/docker-aio/readme.md +++ /dev/null @@ -1,64 +0,0 @@ -# Docker All-In-One - -> :information_source: **NOTE: Sunsetting of this module is imminent.** There is no schedule yet, but expect it to go away. -> Please let the [Dataverse Containerization Working Group](https://ct.gdcc.io) know if you are a user and -> what should be preserved. - -First pass docker all-in-one image, intended for running integration tests against. 
-Also usable for normal development and system evaluation; not intended for production. - -### Requirements: - - java11 compiler, maven, make, wget, docker - -### Quickstart: - - in the root of the repository, run `./conf/docker-aio/prep_it.bash` - - if using DataCite test credentials, update the build args appropriately. - - if all goes well, you should see the results of the `api/info/version` endpoint, including the deployed build (eg `{"status":"OK","data":{"version":"4.8.6","build":"develop-c3e9f40"}}`). If not, you may need to read the non-quickstart instructions. - - run integration tests: `./conf/docker-aio/run-test-suite.sh` - ----- - -## More in-depth documentation: - - -### Initial setup (aka - do once): -- `cd conf/docker-aio` and run `./0prep_deps.sh` to created Payara and Solr tarballs in `conf/docker-aio/dv/deps`. - -### Per-build: - -> Note: If you encounter any issues, see the Troubleshooting section at the end of this document. - -#### Setup - -- `cd conf/docker-aio`, and run `./1prep.sh` to copy files for integration test data into docker build context; `1prep.sh` will also build the war file and installation zip file -- build the docker image: `docker build -t dv0 -f c8.dockerfile .` - -- Run image: `docker run -d -p 8083:8080 -p 8084:80 --name dv dv0` (aka - forward port 8083 locally to 8080 in the container for payara, and 8084 to 80 for apache); if you'd like to connect a java debugger to payara, use `docker run -d -p 8083:8080 -p 8084:80 -p 9010:9009 --name dv dv0` - -- Installation (integration test): `docker exec dv /opt/dv/setupIT.bash` - (Note that it's possible to customize the installation by editing `conf/docker-aio/default.config` and running `docker exec dv /opt/dv/install.bash` but for the purposes of integration testing, the `setupIT.bash` script above works fine.) - -- update `dataverse.siteUrl` (appears only necessary for `DatasetsIT.testPrivateUrl`): `docker exec dv /usr/local/glassfish4/bin/asadmin create-jvm-options "-Ddataverse.siteUrl=http\://localhost\:8084"` (or use the provided `seturl.bash`) - -#### Run integration tests: - -First, cd back to the root of the repo where the `pom.xml` file is (`cd ../..` assuming you're still in the `conf/docker-aio` directory). Then run the test suite with script below: - -`conf/docker-aio/run-test-suite.sh` - -There isn't any strict requirement on the local port (8083, 8084 in this doc), the name of the image (dv0) or container (dv), these can be changed as desired as long as they are consistent. - -### Troubleshooting Notes: - -* If Dataverse' build fails due to an error about `Module` being ambiguous, you might be using a Java 9 compiler. - -* If you see an error like this: - ``` - docker: Error response from daemon: Conflict. The container name "/dv" is already in use by container "5f72a45b68c86c7b0f4305b83ce7d663020329ea4e30fa2a3ce9ddb05223533d" - You have to remove (or rename) that container to be able to reuse that name. - ``` - run something like `docker ps -a | grep dv` to see the container left over from the last run and something like `docker rm 5f72a45b68c8` to remove it. Then try the `docker run` command above again. - -* `empty reply from server` or `Failed to connect to ::1: Cannot assign requested address` tend to indicate either that you haven't given payara enough time to start, or your docker setup is in an inconsistent state and should probably be restarted. - -* For manually fiddling around with the created dataverse, use user `dataverseAdmin` with password `admin1`. 
diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh deleted file mode 100755 index 39809a7a50e..00000000000 --- a/conf/docker-aio/run-test-suite.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -# This is the canonical list of which "IT" tests are expected to pass. - -dvurl=$1 -if [ -z "$dvurl" ]; then - dvurl="http://localhost:8084" -fi - -integrationtests=$( [remainder of this hunk was lost in extraction]
diff --git a/conf/docker-aio/testscripts/db.sh b/conf/docker-aio/testscripts/db.sh deleted file mode 100755 index f0a9e409fd7..00000000000 --- a/conf/docker-aio/testscripts/db.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -psql -U postgres -c "CREATE ROLE dvnapp PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 -psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1
diff --git a/conf/docker-aio/testscripts/install b/conf/docker-aio/testscripts/install deleted file mode 100755 index f87f180b554..00000000000 --- a/conf/docker-aio/testscripts/install +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -export HOST_ADDRESS=localhost -export GLASSFISH_ROOT=/opt/payara5 -export FILES_DIR=/opt/payara5/glassfish/domains/domain1/files -export DB_NAME=dvndb -export DB_PORT=5432 -export DB_HOST=localhost -export DB_USER=dvnapp -export DB_PASS=secret -export RSERVE_HOST=localhost -export RSERVE_PORT=6311 -export RSERVE_USER=rserve -export RSERVE_PASS=rserve -export SMTP_SERVER=localhost -export MEM_HEAP_SIZE=2048 -export GLASSFISH_DOMAIN=domain1 -cd scripts/installer -#cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf -cp /opt/dv/testdata/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf -cp /opt/dv/testdata/jhoveConfig.xsd $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhoveConfig.xsd -./as-setup.sh dvndb
diff --git a/conf/docker-aio/testscripts/post b/conf/docker-aio/testscripts/post deleted file mode 100755 index 0f292109d31..00000000000 --- a/conf/docker-aio/testscripts/post +++ /dev/null @@ -1,13 +0,0 @@ -#/bin/sh -cd scripts/api -./setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out -cd ../..
-psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql -scripts/search/tests/publish-dataverse-root -#git checkout scripts/api/data/dv-root.json -scripts/search/tests/grant-authusers-add-on-root -scripts/search/populate-users -scripts/search/create-users -scripts/search/tests/create-all-and-test -scripts/search/tests/publish-spruce1-and-test -#java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest diff --git a/conf/docker-dcm/.gitignore b/conf/docker-dcm/.gitignore deleted file mode 100644 index ac39981ce6a..00000000000 --- a/conf/docker-dcm/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.rpm -upload*.bash diff --git a/conf/docker-dcm/0prep.sh b/conf/docker-dcm/0prep.sh deleted file mode 100755 index 300aa39d567..00000000000 --- a/conf/docker-dcm/0prep.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh -DCM_VERSION=0.5 -RSAL_VERSION=0.1 - -if [ ! -e dcm-${DCM_VERSION}-0.noarch.rpm ]; then - wget https://github.com/sbgrid/data-capture-module/releases/download/${DCM_VERSION}/dcm-${DCM_VERSION}-0.noarch.rpm -fi - -if [ ! -e rsal-${RSAL_VERSION}-0.noarch.rpm ] ;then - wget https://github.com/sbgrid/rsal/releases/download/${RSAL_VERSION}/rsal-${RSAL_VERSION}-0.noarch.rpm -fi diff --git a/conf/docker-dcm/c6client.dockerfile b/conf/docker-dcm/c6client.dockerfile deleted file mode 100644 index e4d1ae7da82..00000000000 --- a/conf/docker-dcm/c6client.dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -# build from repo root -FROM centos:6 -RUN yum install -y epel-release -RUN yum install -y rsync openssh-clients jq curl wget lynx -RUN useradd depositor -USER depositor -WORKDIR /home/depositor diff --git a/conf/docker-dcm/cfg/dcm/bashrc b/conf/docker-dcm/cfg/dcm/bashrc deleted file mode 100644 index 07137ab8471..00000000000 --- a/conf/docker-dcm/cfg/dcm/bashrc +++ /dev/null @@ -1,18 +0,0 @@ -# .bashrc - -# User specific aliases and functions - -alias rm='rm -i' -alias cp='cp -i' -alias mv='mv -i' - -# Source global definitions -if [ -f /etc/bashrc ]; then - . /etc/bashrc -fi - -# these are dummy values, obviously -export UPLOADHOST=dcmsrv -export DVAPIKEY=burrito -export DVHOSTINT=dvsrv -export DVHOST=dvsrv diff --git a/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh b/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh deleted file mode 100755 index 0db674bfac4..00000000000 --- a/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -/etc/init.d/sshd start -/etc/init.d/redis start -/etc/init.d/rq start -lighttpd -D -f /etc/lighttpd/lighttpd.conf diff --git a/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh b/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh deleted file mode 100755 index 3964a79391e..00000000000 --- a/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh - -r_rq=`/etc/init.d/rq status` -if [ "rq_worker running" != "$r_rq" ]; then - echo "rq failed" - exit 1 -fi -r_www=`/etc/init.d/lighttpd status` -e_www=$? 
-if [ 0 -ne $e_www ]; then - echo "lighttpd failed" - exit 2 -fi - diff --git a/conf/docker-dcm/cfg/dcm/rq-init-d b/conf/docker-dcm/cfg/dcm/rq-init-d deleted file mode 100755 index 093cd894376..00000000000 --- a/conf/docker-dcm/cfg/dcm/rq-init-d +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash - -# chkconfig: 2345 90 60 -# description: rq worker script (single worker process) - -# example rq configuration file (to be placed in /etc/init.d) - -# works on cent6 - -DAEMON=rq_worker -DAEMON_PATH=/opt/dcm/gen/ -export UPLOADHOST=dcmsrv -VIRTUALENV= -LOGFILE=/var/log/${DAEMON}.log -PIDFILE=/var/run/${DAEMON}.pid - -case "$1" in -start) - printf "%-50s" "starting $DAEMON..." - cd $DAEMON_PATH - if [ ! -z "$VIRTUALENV" ]; then - source $VIRTUALENV/bin/activate - fi - rq worker normal --pid $PIDFILE > ${LOGFILE} 2>&1 & -;; -status) - if [ -f $PIDFILE ]; then - PID=`cat $PIDFILE` - if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then - printf "%s\n" "$DAEMON not running, but PID file ($PIDFILE) exists" - else - echo "$DAEMON running" - fi - else - printf "%s\n" "$DAEMON not running" - fi -;; -stop) - printf "%-50s" "stopping $DAEMON" - if [ -f $PIDFILE ]; then - PID=`cat $PIDFILE` - kill -HUP $PID - rm -f $PIDFILE - else - printf "%s\n" "no PID file ($PIDFILE) - maybe not running" - fi -;; -restart) - $0 stop - $0 start -;; - -*) - echo "Usage: $0 {status|start|stop|restart}" - exit 1 -esac - diff --git a/conf/docker-dcm/cfg/dcm/test_install.sh b/conf/docker-dcm/cfg/dcm/test_install.sh deleted file mode 100755 index 3026ceb9fa5..00000000000 --- a/conf/docker-dcm/cfg/dcm/test_install.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - -cp /etc/dcm/rq-init-d /etc/init.d/rq -cp /etc/dcm/lighttpd-conf-dcm /etc/lighttpd/lighttpd.conf -cp /etc/dcm/lighttpd-modules-dcm /etc/lighttpd/modules.conf -cp /etc/dcm/dcm-rssh.conf /etc/rssh.conf - diff --git a/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh b/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh deleted file mode 100755 index 92466c3bd4b..00000000000 --- a/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh - -#/usr/bin/rsync --no-detach --daemon --config /etc/rsyncd.conf -/usr/bin/rsync --daemon --config /etc/rsyncd.conf -lighttpd -D -f /etc/lighttpd/lighttpd.conf diff --git a/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf b/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf deleted file mode 100644 index cdb1438af82..00000000000 --- a/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf +++ /dev/null @@ -1,174 +0,0 @@ -####################################################################### -## -## ansible managed -# -## Modules to load -## ----------------- -## -## at least mod_access and mod_accesslog should be loaded -## all other module should only be loaded if really neccesary -## -## - saves some time -## - saves memory -## -## the default module set contains: -## -## "mod_indexfile", "mod_dirlisting", "mod_staticfile" -## -## you dont have to include those modules in your list -## -## Modules, which are pulled in via conf.d/*.conf -## -## NOTE: the order of modules is important. 
-## -## - mod_accesslog -> conf.d/access_log.conf -## - mod_compress -> conf.d/compress.conf -## - mod_status -> conf.d/status.conf -## - mod_webdav -> conf.d/webdav.conf -## - mod_cml -> conf.d/cml.conf -## - mod_evhost -> conf.d/evhost.conf -## - mod_simple_vhost -> conf.d/simple_vhost.conf -## - mod_mysql_vhost -> conf.d/mysql_vhost.conf -## - mod_trigger_b4_dl -> conf.d/trigger_b4_dl.conf -## - mod_userdir -> conf.d/userdir.conf -## - mod_rrdtool -> conf.d/rrdtool.conf -## - mod_ssi -> conf.d/ssi.conf -## - mod_cgi -> conf.d/cgi.conf -## - mod_scgi -> conf.d/scgi.conf -## - mod_fastcgi -> conf.d/fastcgi.conf -## - mod_proxy -> conf.d/proxy.conf -## - mod_secdownload -> conf.d/secdownload.conf -## - mod_expire -> conf.d/expire.conf -## - -server.modules = ( - "mod_access", -# "mod_alias", -# "mod_auth", -# "mod_evasive", -# "mod_redirect", -# "mod_rewrite", -# "mod_setenv", -# "mod_usertrack", -) - -## -####################################################################### - -####################################################################### -## -## Config for various Modules -## - -## -## mod_ssi -## -#include "conf.d/ssi.conf" - -## -## mod_status -## -#include "conf.d/status.conf" - -## -## mod_webdav -## -#include "conf.d/webdav.conf" - -## -## mod_compress -## -#include "conf.d/compress.conf" - -## -## mod_userdir -## -#include "conf.d/userdir.conf" - -## -## mod_magnet -## -#include "conf.d/magnet.conf" - -## -## mod_cml -## -#include "conf.d/cml.conf" - -## -## mod_rrdtool -## -#include "conf.d/rrdtool.conf" - -## -## mod_proxy -## -#include "conf.d/proxy.conf" - -## -## mod_expire -## -#include "conf.d/expire.conf" - -## -## mod_secdownload -## -#include "conf.d/secdownload.conf" - -## -####################################################################### - -####################################################################### -## -## CGI modules -## - -## -## SCGI (mod_scgi) -## -#include "conf.d/scgi.conf" - -## -## FastCGI (mod_fastcgi) -## -#include "conf.d/fastcgi.conf" - -## -## plain old CGI (mod_cgi) -## -include "conf.d/cgi.conf" - -## -####################################################################### - -####################################################################### -## -## VHost Modules -## -## Only load ONE of them! -## ======================== -## - -## -## You can use conditionals for vhosts aswell. -## -## see http://www.lighttpd.net/documentation/configuration.html -## - -## -## mod_evhost -## -#include "conf.d/evhost.conf" - -## -## mod_simple_vhost -## -#include "conf.d/simple_vhost.conf" - -## -## mod_mysql_vhost -## -#include "conf.d/mysql_vhost.conf" - -## -####################################################################### diff --git a/conf/docker-dcm/cfg/rsal/lighttpd.conf b/conf/docker-dcm/cfg/rsal/lighttpd.conf deleted file mode 100644 index 5874d60eb48..00000000000 --- a/conf/docker-dcm/cfg/rsal/lighttpd.conf +++ /dev/null @@ -1,43 +0,0 @@ -## lighttpd configuration customized for RSAL; centos7 - -# refuse connections not from frontend or localhost -# DO NOT HAVE THIS OPEN TO THE WORLD!!! -#$HTTP["remoteip"] !~ "192.168.2.2|127.0.0.1" { -#url.access-deny = ("") -#} -server.breakagelog = "/var/log/lighttpd/breakage.log" - -####################################################################### -## -## Some Variable definition which will make chrooting easier. -## -## if you add a variable here. Add the corresponding variable in the -## chroot example aswell. 
-## -var.log_root = "/var/log/lighttpd" -var.server_root = "/opt/rsal/api" -var.state_dir = "/var/run" -var.home_dir = "/var/lib/lighttpd" -var.conf_dir = "/etc/lighttpd" - -var.cache_dir = "/var/cache/lighttpd" -var.socket_dir = home_dir + "/sockets" -include "modules.conf" -server.port = 80 -server.use-ipv6 = "disable" -server.username = "lighttpd" -server.groupname = "lighttpd" -server.document-root = server_root -server.pid-file = state_dir + "/lighttpd.pid" -server.errorlog = log_root + "/error.log" -include "conf.d/access_log.conf" -include "conf.d/debug.conf" -server.event-handler = "linux-sysepoll" -server.network-backend = "linux-sendfile" -server.stat-cache-engine = "simple" -server.max-connections = 1024 -static-file.exclude-extensions = ( ".php", ".pl", ".fcgi", ".scgi" ) -include "conf.d/mime.conf" -include "conf.d/dirlisting.conf" -server.follow-symlink = "enable" -server.upload-dirs = ( "/var/tmp" ) diff --git a/conf/docker-dcm/cfg/rsal/rsyncd.conf b/conf/docker-dcm/cfg/rsal/rsyncd.conf deleted file mode 100644 index 5a15ab28a12..00000000000 --- a/conf/docker-dcm/cfg/rsal/rsyncd.conf +++ /dev/null @@ -1,8 +0,0 @@ -lock file=/var/run/rsync.lock -log file=/var/log/rsyncd.log -pid file=/var/log/rsyncd.pid - -[10.5072] - path=/public/ - read only=yes - diff --git a/conf/docker-dcm/configure_dcm.sh b/conf/docker-dcm/configure_dcm.sh deleted file mode 100755 index 5b65b0a0314..00000000000 --- a/conf/docker-dcm/configure_dcm.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/sh - -echo "dcm configs on dv side to be done" - -# in homage to dataverse traditions, reset to insecure "burrito" admin API key -sudo -u postgres psql -c "update apitoken set tokenstring='burrito' where id=1;" dvndb -sudo -u postgres psql -c "update authenticateduser set superuser='t' where id=1;" dvndb - -# dataverse configs for DCM -curl -X PUT -d "SHA-1" "http://localhost:8080/api/admin/settings/:FileFixityChecksumAlgorithm" -curl -X PUT "http://localhost:8080/api/admin/settings/:UploadMethods" -d "dcm/rsync+ssh" -curl -X PUT "http://localhost:8080/api/admin/settings/:DataCaptureModuleUrl" -d "http://dcmsrv" - -# configure for RSAL downloads; but no workflows or RSAL yet -curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "rsal/rsync" - -# publish root dataverse -curl -X POST -H "X-Dataverse-key: burrito" "http://localhost:8080/api/dataverses/root/actions/:publish" - -# symlink `hold` volume -mkdir -p /usr/local/glassfish4/glassfish/domains/domain1/files/ -ln -s /hold /usr/local/glassfish4/glassfish/domains/domain1/files/10.5072 - -# need to set siteUrl -cd /usr/local/glassfish4 -bin/asadmin create-jvm-options "\"-Ddataverse.siteUrl=http\://localhost\:8084\"" diff --git a/conf/docker-dcm/configure_rsal.sh b/conf/docker-dcm/configure_rsal.sh deleted file mode 100755 index 5db43a34381..00000000000 --- a/conf/docker-dcm/configure_rsal.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh - -fn=rsal-workflow2.json -# needs an actual IP (vs a hostname) for whitelist -rsalip=`dig +short rsalsrv` - -# create workflow -curl -s -X POST -H "Content-type: application/json" -d @${fn} "http://localhost:8080/api/admin/workflows" - -# put rsal on the whitelist -curl -X PUT -d "127.0.0.1;${rsalip}" "http://localhost:8080/api/admin/workflows/ip-whitelist" - -# set workflow as default -curl -X PUT -d "1" "http://localhost:8080/api/admin/workflows/default/PrePublishDataset" - -# local access path -curl -X PUT -d "/hpc/storage" "http://localhost:8080/api/admin/settings/:LocalDataAccessPath" - -# storage sites -curl -X 
POST -H "Content-type: application/json" --upload-file site-primary.json "http://localhost:8080/api/admin/storageSites" -curl -X POST -H "Content-type: application/json" --upload-file site-remote.json "http://localhost:8080/api/admin/storageSites" diff --git a/conf/docker-dcm/create.bash b/conf/docker-dcm/create.bash deleted file mode 100755 index 58ae6e61dc7..00000000000 --- a/conf/docker-dcm/create.bash +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - - -# user creates dataset -k_d=burrito -dv_d=root -h=http://dvsrv - -fn=dataset.json -#dset_id=`curl -s -H "X-Dataverse-key: $k_d" -X POST --upload-file $fn $h/api/dataverses/$dv_d/datasets | jq .data.id` -r=`curl -s -H "X-Dataverse-key: $k_d" -X POST --upload-file $fn $h/api/dataverses/$dv_d/datasets` -echo $r -dset_id=`echo $r | jq .data.id` -echo "dataset created with id: $dset_id" - -if [ "null" == "${dset_id}" ]; then - echo "error - no dataset id from create command" - exit 1 -fi -echo "dataset created; internal/db id: ${dset_id}" - - diff --git a/conf/docker-dcm/dataset.json b/conf/docker-dcm/dataset.json deleted file mode 100644 index fb1b734ed40..00000000000 --- a/conf/docker-dcm/dataset.json +++ /dev/null @@ -1,126 +0,0 @@ -{ - "datasetVersion": { - "metadataBlocks": { - "citation": { - "displayName": "Citation Metadata", - "fields": [ - { - "typeName": "title", - "multiple": false, - "typeClass": "primitive", - "value": "DCM test dataset" - }, - { - "typeName": "productionDate", - "multiple": false, - "typeClass": "primitive", - "value": "2017-04-01" - }, - { - "typeName": "dsDescription", - "multiple": true, - "typeClass": "compound", - "value": [ - { - "dsDescriptionValue": { - "typeName": "dsDescriptionValue", - "multiple": false, - "typeClass": "primitive", - "value": "this would normally be a dataset large enough to require a DCM" - } - } - ] - }, - { - "typeName": "depositor", - "multiple": false, - "typeClass": "primitive", - "value": "Doc, Bob" - }, - { - "typeName": "producer", - "multiple": true, - "typeClass": "compound", - "value": [ - { - "producerName": { - "typeName": "producerName", - "multiple": false, - "typeClass": "primitive", - "value": "Prof, Arthor" - }, - "producerAffiliation": { - "typeName": "producerAffiliation", - "multiple": false, - "typeClass": "primitive", - "value": "LibraScholar" - } - } - ] - }, - { - "typeName": "author", - "multiple": true, - "typeClass": "compound", - "value": [ - { - "authorName": { - "typeName": "authorName", - "multiple": false, - "typeClass": "primitive", - "value": "Student, Carol" - } - , - "authorAffiliation": { - "typeName": "authorAffiliation", - "multiple": false, - "typeClass": "primitive", - "value": "LibraScholar" - } - }, - { - "authorName": { - "typeName": "authorName", - "multiple": false, - "typeClass": "primitive", - "value": "Doc, Bob" - } - , - "authorAffiliation": { - "typeName": "authorAffiliation", - "multiple": false, - "typeClass": "primitive", - "value": "LibraScholar" - } - } - - ] - }, - { - "typeName": "datasetContact", - "multiple": true, - "typeClass": "compound", - "value": [ - { - "datasetContactEmail": { - "typeName": "datasetContactEmail", - "multiple": false, - "typeClass": "primitive", - "value": "dsContact@mailinator.com" - } - } - ] - }, - { - "typeName": "subject", - "multiple": true, - "typeClass": "controlledVocabulary", - "value": [ - "Medicine, Health and Life Sciences" - ] - } - ] - } - } - } -} diff --git a/conf/docker-dcm/dcmsrv.dockerfile b/conf/docker-dcm/dcmsrv.dockerfile deleted file mode 100644 index 
9989fa3a89d..00000000000 --- a/conf/docker-dcm/dcmsrv.dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -# build from repo root -FROM centos:6 -RUN yum install -y epel-release -ARG RPMFILE=dcm-0.5-0.noarch.rpm -COPY ${RPMFILE} /tmp/ -COPY cfg/dcm/bashrc /root/.bashrc -COPY cfg/dcm/test_install.sh /root/ -RUN yum localinstall -y /tmp/${RPMFILE} -RUN pip install -r /opt/dcm/requirements.txt -RUN pip install awscli==1.15.75 -run export PATH=~/.local/bin:$PATH -RUN /root/test_install.sh -COPY cfg/dcm/rq-init-d /etc/init.d/rq -RUN useradd glassfish -COPY cfg/dcm/entrypoint-dcm.sh / -COPY cfg/dcm/healthcheck-dcm.sh / -EXPOSE 80 -EXPOSE 22 -VOLUME /hold -HEALTHCHECK CMD /healthcheck-dcm.sh -CMD ["/entrypoint-dcm.sh"] diff --git a/conf/docker-dcm/docker-compose.yml b/conf/docker-dcm/docker-compose.yml deleted file mode 100644 index 49d4467d349..00000000000 --- a/conf/docker-dcm/docker-compose.yml +++ /dev/null @@ -1,50 +0,0 @@ -# initial docker-compose file for combined Dataverse and DCM with shared filesystem - -version: '3' - -services: - dcmsrv: - build: - context: . - dockerfile: dcmsrv.dockerfile - container_name: dcmsrv - volumes: - - hold:/hold - rsalsrv: - build: - context: . - dockerfile: rsalsrv.dockerfile - container_name: rsalsrv -# image: rsalrepo_rsal - volumes: - - hold:/hold - - ./:/mnt - environment: - DV_HOST: http://dvsrv:8080 - DV_APIKEY: burrito - ports: - - "8889:80" - - "873:873" - dvsrv: - build: - context: . - dockerfile: dv0dcm.dockerfile - container_name: dvsrv - volumes: - - hold:/hold - - ./:/mnt - ports: - - "8083:8080" - - "8084:80" - client: - build: - context: . - dockerfile: c6client.dockerfile - command: sleep infinity - container_name: dcm_client - volumes: - - ./:/mnt - -volumes: - hold: - diff --git a/conf/docker-dcm/dv0dcm.dockerfile b/conf/docker-dcm/dv0dcm.dockerfile deleted file mode 100644 index 021534c8978..00000000000 --- a/conf/docker-dcm/dv0dcm.dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -# dv0 assumed to be image name for docker-aio -FROM dv0 -RUN yum install -y bind-utils -COPY configure_dcm.sh /opt/dv/ -COPY configure_rsal.sh /opt/dv/ -COPY rsal-workflow2.json site-primary.json site-remote.json /opt/dv/ -VOLUME /hold diff --git a/conf/docker-dcm/get_transfer.bash b/conf/docker-dcm/get_transfer.bash deleted file mode 100755 index 42080f536e1..00000000000 --- a/conf/docker-dcm/get_transfer.bash +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -# user gets transfer script - -dset_id=$1 -if [ -z "$dset_id" ]; then - echo "no dataset id specified, bailing out" - exit 1 -fi - -k_d=burrito -dv_d=root - -h=http://dvsrv - -#get upload script from DCM -wget --header "X-Dataverse-key: ${k_d}" ${h}/api/datasets/${dset_id}/dataCaptureModule/rsync -O upload-${dset_id}.bash - - diff --git a/conf/docker-dcm/publish_major.bash b/conf/docker-dcm/publish_major.bash deleted file mode 100755 index 6a3fd1288ca..00000000000 --- a/conf/docker-dcm/publish_major.bash +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash - -# publish dataset based on database id - -dset_id=$1 -if [ -z "$dset_id" ]; then - echo "no dataset id specified, bailing out" - exit 1 -fi - -k_d=burrito - -h=http://dvsrv - -curl -X POST -H "X-Dataverse-key: ${k_d}" "${h}/api/datasets/${dset_id}/actions/:publish?type=major" - - diff --git a/conf/docker-dcm/readme.md b/conf/docker-dcm/readme.md deleted file mode 100644 index 3e6a15e61d6..00000000000 --- a/conf/docker-dcm/readme.md +++ /dev/null @@ -1,26 +0,0 @@ -This docker-compose setup is intended for use in development, small scale evaluation, and potentially 
serve as an example of a working (although not production security level) configuration. - -Setup: - -- build docker-aio image with name dv0 as described in `../docker-aio` (don't start up the docker image or run setupIT.bash) -- work in the `conf/docker-dcm` directory for below commands -- download/prepare dependencies: `./0prep.sh` -- build dcm/dv0dcm images with docker-compose: `docker-compose -f docker-compose.yml build` -- start containers: `docker-compose -f docker-compose.yml up -d` -- wait for container to show "healthy" (aka - `docker ps`), then run dataverse app installation: `docker exec dvsrv /opt/dv/install.bash` -- for development, you probably want to use the `FAKE` DOI provider: `docker exec -it dvsrv /opt/dv/configure_doi.bash` -- configure dataverse application to use DCM: `docker exec -it dvsrv /opt/dv/configure_dcm.sh` -- configure dataverse application to use RSAL (if desired): `docker exec -it dvsrv /opt/dv/configure_rsal.sh` - -Operation: -The dataverse installation is accessible at `http://localhost:8084`. -The `dcm_client` container is intended to be used for executing transfer scripts, and `conf/docker-dcm` is available at `/mnt` inside the container; this container can be accessed with `docker exec -it dcm_client bash`. -The DCM cron job is NOT configured here; for development purposes the DCM checks can be run manually with `docker exec -it dcmsrv /opt/dcm/scn/post_upload.bash`. -The RSAL cron job is similarly NOT configured; for development purposes `docker exec -it rsalsrv /opt/rsal/scn/pub.py` can be run manually. - - -Cleanup: -- shutdown/cleanup `docker-compose -f docker-compose.yml down -v` - -For reference, this configuration was working with docker 17.09 / docker-compose 1.16. - diff --git a/conf/docker-dcm/rsal-workflow2.json b/conf/docker-dcm/rsal-workflow2.json deleted file mode 100644 index 322d3ecbcf7..00000000000 --- a/conf/docker-dcm/rsal-workflow2.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "RSAL file move for publication", - "steps": [ - { - "provider":":internal", - "stepType":"log", - "parameters": { - "message": "Pre-http request" - } - }, - { - "provider":":internal", - "stepType":"http/sr", - "parameters": { - "url":"http://rsalsrv/rr.py", - "method":"POST", - "contentType":"text/plain", - "body":"${invocationId}\ndataset.id=${dataset.id}\ndataset.identifier=${dataset.identifier}\ndataset.globalId=${dataset.globalId}", - "expectedResponse":"OK.*", - "rollbackMethod":"DELETE" - } - }, - { - "provider":":internal", - "stepType":"log", - "parameters": { - "message": "Post-http request" - } - } - ] -} diff --git a/conf/docker-dcm/rsalsrv.dockerfile b/conf/docker-dcm/rsalsrv.dockerfile deleted file mode 100644 index 844432afe6b..00000000000 --- a/conf/docker-dcm/rsalsrv.dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM centos:7 -ARG RPMFILE=rsal-0.1-0.noarch.rpm -RUN yum update; yum install -y epel-release -COPY ${RPMFILE} /tmp/ -RUN yum localinstall -y /tmp/${RPMFILE} -COPY cfg/rsal/rsyncd.conf /etc/rsyncd.conf -COPY cfg/rsal/entrypoint-rsal.sh /entrypoint.sh -COPY cfg/rsal/lighttpd-modules.conf /etc/lighttpd/modules.conf -COPY cfg/rsal/lighttpd.conf /etc/lighttpd/lighttpd.conf -RUN mkdir -p /public/FK2 -RUN pip2 install -r /opt/rsal/scn/requirements.txt -#COPY doc/testdata/ /hold/ -ARG DV_HOST=http://dv_srv:8080 -ARG DV_API_KEY=burrito -ENV DV_HOST ${DV_HOST} -ENV DV_API_KEY ${DV_API_KEY} -EXPOSE 873 -EXPOSE 80 -HEALTHCHECK CMD curl --fail http://localhost/hw.py || exit 1 -CMD ["/entrypoint.sh"] diff --git 
a/conf/docker-dcm/site-primary.json b/conf/docker-dcm/site-primary.json deleted file mode 100644 index 35b217edffd..00000000000 --- a/conf/docker-dcm/site-primary.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "hostname": "rsalsrv", - "name": "LibraScholar University", - "primaryStorage": true, - "transferProtocols": "rsync,posix" -} diff --git a/conf/docker-dcm/site-remote.json b/conf/docker-dcm/site-remote.json deleted file mode 100644 index d47c3ef4dda..00000000000 --- a/conf/docker-dcm/site-remote.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "hostname": "remote.libra.research", - "name": "LibraResearch Institute", - "primaryStorage": false, - "transferProtocols": "rsync" -} diff --git a/conf/jhove/jhove.conf b/conf/jhove/jhove.conf index 5134ae0f81a..971c60acfaa 100644 --- a/conf/jhove/jhove.conf +++ b/conf/jhove/jhove.conf @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig" xsi:schemaLocation="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig - file:///usr/local/payara5/glassfish/domains/domain1/config/jhoveConfig.xsd"> + file:///usr/local/payara6/glassfish/domains/domain1/config/jhoveConfig.xsd"> /usr/local/src/jhove utf-8 /tmp diff --git a/conf/solr/8.11.1/readme.md b/conf/solr/8.11.1/readme.md deleted file mode 100644 index 4457cf9a7df..00000000000 --- a/conf/solr/8.11.1/readme.md +++ /dev/null @@ -1 +0,0 @@ -Please see the dev guide for what to do with Solr config files. \ No newline at end of file diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/9.3.0/schema.xml similarity index 96% rename from conf/solr/8.11.1/schema.xml rename to conf/solr/9.3.0/schema.xml index ceff082f418..3c15b659c4e 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/9.3.0/schema.xml @@ -23,7 +23,7 @@ For more information, on how to customize this file, please see - http://lucene.apache.org/solr/guide/documents-fields-and-schema-design.html + https://solr.apache.org/guide/solr/latest/indexing-guide/schema-elements.html PERFORMANCE NOTE: this schema includes many optional features and should not be used for benchmarking. To improve performance one could @@ -38,7 +38,7 @@ catchall "text" field, and use that for searching. --> - + - - - - - - - - - + + @@ -163,7 +156,7 @@ - + @@ -200,7 +193,7 @@ - + @@ -208,9 +201,9 @@ - + - + @@ -218,10 +211,10 @@ - + - + @@ -658,7 +651,8 @@ + RESTRICTION: the glob-like pattern in the name attribute must have a "*" + only at the start or the end. --> @@ -666,19 +660,23 @@ - + + + + + + + - - - - + + @@ -724,43 +722,6 @@ field first in an ascending sort and last in a descending sort. 
--> - - - - - - - - - - - - - - - - + - + + + + + + + + + + + + + + + + + + + - - - - - - - 7.3.0 - - - - - - - - - - - - - - - - - - - - ${solr.data.dir:} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ${solr.lock.type:native} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ${solr.ulog.dir:} - ${solr.ulog.numVersionBuckets:65536} - - - - - ${solr.autoCommit.maxTime:15000} - false - - - - - - ${solr.autoSoftCommit.maxTime:-1} - - - - - - - - - - - - - - 1024 - - - - - - - - - - - - - - - - - - - - - - - - true - - - - - - 20 - - - 200 - - - - - - - - - - - - - - - - false - - - - - - - - - - - - - - - - - - - - - - explicit - 10 - edismax - 0.075 - - dvName^400 - authorName^180 - dvSubject^190 - dvDescription^180 - dvAffiliation^170 - title^130 - subject^120 - keyword^110 - topicClassValue^100 - dsDescriptionValue^90 - authorAffiliation^80 - publicationCitation^60 - producerName^50 - fileName^30 - fileDescription^30 - variableLabel^20 - variableName^10 - _text_^1.0 - - - dvName^200 - authorName^100 - dvSubject^100 - dvDescription^100 - dvAffiliation^100 - title^75 - subject^75 - keyword^75 - topicClassValue^75 - dsDescriptionValue^75 - authorAffiliation^75 - publicationCitation^75 - producerName^75 - - - - isHarvested:false^25000 - - - - - - - - - - - - - - - - - - explicit - json - true - - - - - - - - explicit - - - - - - _text_ - - - - - - - true - ignored_ - _text_ - - - - - - - - - text_general - - - - - - default - _text_ - solr.DirectSolrSpellChecker - - internal - - 0.5 - - 2 - - 1 - - 5 - - 4 - - 0.01 - - - - - - - - - - - - default - on - true - 10 - 5 - 5 - true - true - 10 - 5 - - - spellcheck - - - - - - - - - - true - - - tvComponent - - - - - - - - - - - - true - false - - - terms - - - - - - - - string - - - - - - explicit - - - elevator - - - - - - - - - - - 100 - - - - - - - - 70 - - 0.5 - - [-\w ,/\n\"']{20,200} - - - - - - - ]]> - ]]> - - - - - - - - - - - - - - - - - - - - - - - - ,, - ,, - ,, - ,, - ,]]> - ]]> - - - - - - 10 - .,!? - - - - - - - WORD - - - en - US - - - - - - - - - - - - - - [^\w-\.] 
- _ - - - - - - - yyyy-MM-dd'T'HH:mm:ss.SSSZ - yyyy-MM-dd'T'HH:mm:ss,SSSZ - yyyy-MM-dd'T'HH:mm:ss.SSS - yyyy-MM-dd'T'HH:mm:ss,SSS - yyyy-MM-dd'T'HH:mm:ssZ - yyyy-MM-dd'T'HH:mm:ss - yyyy-MM-dd'T'HH:mmZ - yyyy-MM-dd'T'HH:mm - yyyy-MM-dd HH:mm:ss.SSSZ - yyyy-MM-dd HH:mm:ss,SSSZ - yyyy-MM-dd HH:mm:ss.SSS - yyyy-MM-dd HH:mm:ss,SSS - yyyy-MM-dd HH:mm:ssZ - yyyy-MM-dd HH:mm:ss - yyyy-MM-dd HH:mmZ - yyyy-MM-dd HH:mm - yyyy-MM-dd - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/plain; charset=UTF-8 - - - - - ${velocity.template.base.dir:} - ${velocity.solr.resource.loader.enabled:true} - ${velocity.params.resource.loader.enabled:false} - - - - - 5 - - - - - - - - - - - - - - + + + + + + + + + 9.7 + + + + + + + + + + + ${solr.data.dir:} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.lock.type:native} + + + + + + + + + + + + + + + + + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + + ${solr.autoCommit.maxTime:15000} + false + + + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + + + + + + + + + + ${solr.max.booleanClauses:1024} + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + + 20 + + + 200 + + + + + + + + + + + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + explicit + 10 + + edismax + 0.075 + + dvName^400 + authorName^180 + dvSubject^190 + dvDescription^180 + dvAffiliation^170 + title^130 + subject^120 + keyword^110 + topicClassValue^100 + dsDescriptionValue^90 + authorAffiliation^80 + publicationCitation^60 + producerName^50 + fileName^30 + fileDescription^30 + variableLabel^20 + variableName^10 + _text_^1.0 + + + dvName^200 + authorName^100 + dvSubject^100 + dvDescription^100 + dvAffiliation^100 + title^75 + subject^75 + keyword^75 + topicClassValue^75 + dsDescriptionValue^75 + authorAffiliation^75 + publicationCitation^75 + producerName^75 + + + + isHarvested:false^25000 + + + + + + + + explicit + json + true + + + + + + + _text_ + + + + + + + text_general + + + + + + default + _text_ + solr.DirectSolrSpellChecker + + internal + + 0.5 + + 2 + + 1 + + 5 + + 4 + + 0.01 + + + + + + + + + + + + + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + + + + + + + + + + + + + + + ,, + ,, + ,, + ,, + ,]]> + ]]> + + + + + + 10 + .,!? + + + + + + + WORD + + + en + US + + + + + + + + + + + + + [^\w-\.] 
+ _ + + + + + + + yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z + yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z + yyyy-MM-dd HH:mm[:ss[.SSS]][z + yyyy-MM-dd HH:mm[:ss[,SSS]][z + [EEE, ]dd MMM yyyy HH:mm[:ss] z + EEEE, dd-MMM-yy HH:mm:ss z + EEE MMM ppd HH:mm:ss [z ]yyyy + + + + + java.lang.String + text_general + + *_str + 256 + + + true + + + java.lang.Boolean + booleans + + + java.util.Date + pdates + + + java.lang.Long + java.lang.Integer + plongs + + + java.lang.Number + pdoubles + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conf/solr/8.11.1/update-fields.sh b/conf/solr/9.3.0/update-fields.sh similarity index 100% rename from conf/solr/8.11.1/update-fields.sh rename to conf/solr/9.3.0/update-fields.sh diff --git a/conf/vagrant/etc/shibboleth/attribute-map.xml b/conf/vagrant/etc/shibboleth/attribute-map.xml deleted file mode 100644 index f6386b620f5..00000000000 --- a/conf/vagrant/etc/shibboleth/attribute-map.xml +++ /dev/null @@ -1,141 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml b/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml deleted file mode 100644 index 67225b5e670..00000000000 --- a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml +++ /dev/null @@ -1,298 +0,0 @@ - - - - - - - - - - - - - - - - - - - - testshib.org - - TestShib Test IdP - TestShib IdP. Use this as a source of attributes - for your test SP. - https://www.testshib.org/images/testshib-transp.png - - - - - - - - MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD - VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4 - MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI - EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl - c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B - AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C - yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe - 3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT - NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614 - kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH - gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G - A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86 - 9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl - bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo - aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL - I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo - 93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4 - /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj - Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr - 8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA== - - - - - - - - - - - - - - - urn:mace:shibboleth:1.0:nameIdentifier - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - - - - - - - - - - - - - - - - MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD - VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4 - 
MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI - EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl - c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B - AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C - yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe - 3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT - NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614 - kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH - gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G - A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86 - 9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl - bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo - aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL - I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo - 93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4 - /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj - Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr - 8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA== - - - - - - - - - - - - - - - - urn:mace:shibboleth:1.0:nameIdentifier - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - - - - - TestShib Two Identity Provider - TestShib Two - http://www.testshib.org/testshib-two/ - - - Nate - Klingenstein - ndk@internet2.edu - - - - - - - - - - - - - - - - - - - - - - - - - TestShib Test SP - TestShib SP. Log into this to test your machine. - Once logged in check that all attributes that you expected have been - released. - https://www.testshib.org/images/testshib-transp.png - - - - - - - - MIIEPjCCAyagAwIBAgIBADANBgkqhkiG9w0BAQUFADB3MQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMSIwIAYD - VQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQDEw9zcC50ZXN0 - c2hpYi5vcmcwHhcNMDYwODMwMjEyNDM5WhcNMTYwODI3MjEyNDM5WjB3MQswCQYD - VQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1 - cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQD - Ew9zcC50ZXN0c2hpYi5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB - AQDJyR6ZP6MXkQ9z6RRziT0AuCabDd3x1m7nLO9ZRPbr0v1LsU+nnC363jO8nGEq - sqkgiZ/bSsO5lvjEt4ehff57ERio2Qk9cYw8XCgmYccVXKH9M+QVO1MQwErNobWb - AjiVkuhWcwLWQwTDBowfKXI87SA7KR7sFUymNx5z1aoRvk3GM++tiPY6u4shy8c7 - vpWbVfisfTfvef/y+galxjPUQYHmegu7vCbjYP3On0V7/Ivzr+r2aPhp8egxt00Q - XpilNai12LBYV3Nv/lMsUzBeB7+CdXRVjZOHGuQ8mGqEbsj8MBXvcxIKbcpeK5Zi - JCVXPfarzuriM1G5y5QkKW+LAgMBAAGjgdQwgdEwHQYDVR0OBBYEFKB6wPDxwYrY - StNjU5P4b4AjBVQVMIGhBgNVHSMEgZkwgZaAFKB6wPDxwYrYStNjU5P4b4AjBVQV - oXukeTB3MQswCQYDVQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYD - VQQHEwpQaXR0c2J1cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3Zp - ZGVyMRgwFgYDVQQDEw9zcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAc06Kgt7ZP6g2TIZgMbFxg6vKwvDL0+2dzF11Onpl - 5sbtkPaNIcj24lQ4vajCrrGKdzHXo9m54BzrdRJ7xDYtw0dbu37l1IZVmiZr12eE - Iay/5YMU+aWP1z70h867ZQ7/7Y4HW345rdiS6EW663oH732wSYNt9kr7/0Uer3KD - 9CuPuOidBacospDaFyfsaJruE99Kd6Eu/w5KLAGG+m0iqENCziDGzVA47TngKz2v - PVA+aokoOyoz3b53qeti77ijatSEoKjxheBWpO+eoJeGq/e49Um3M2ogIX/JAlMa - Inh+vYSYngQB2sx9LGkR9KHaMKNIGCDehk93Xla4pWJx1w== - - - - - - - - - - - - - - - - - - - - - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - urn:mace:shibboleth:1.0:nameIdentifier - - - - - - - - - - - - - - - - - - - - TestShib Two Service Provider - TestShib Two - 
http://www.testshib.org/testshib-two/ - - - Nate - Klingenstein - ndk@internet2.edu - - - - - - - diff --git a/conf/vagrant/etc/shibboleth/shibboleth2.xml b/conf/vagrant/etc/shibboleth/shibboleth2.xml deleted file mode 100644 index 946e73bdf6a..00000000000 --- a/conf/vagrant/etc/shibboleth/shibboleth2.xml +++ /dev/null @@ -1,85 +0,0 @@ - - - - - - - - - - - - - - - - - - - - SAML2 SAML1 - - - - SAML2 Local - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo b/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo deleted file mode 100644 index 1e0f8200040..00000000000 --- a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo +++ /dev/null @@ -1,15 +0,0 @@ -# Place this file in your /etc/yum.repos.d/ directory - -[epel-apache-maven] -name=maven from apache foundation. -baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/$basearch/ -enabled=1 -skip_if_unavailable=1 -gpgcheck=0 - -[epel-apache-maven-source] -name=maven from apache foundation. - Source -baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/SRPMS -enabled=0 -skip_if_unavailable=1 -gpgcheck=0 diff --git a/conf/vagrant/etc/yum.repos.d/shibboleth.repo b/conf/vagrant/etc/yum.repos.d/shibboleth.repo deleted file mode 100644 index adf42185d8a..00000000000 --- a/conf/vagrant/etc/yum.repos.d/shibboleth.repo +++ /dev/null @@ -1,9 +0,0 @@ -[shibboleth] -name=Shibboleth (rockylinux8) -# Please report any problems to https://shibboleth.atlassian.net/jira -type=rpm-md -mirrorlist=https://shibboleth.net/cgi-bin/mirrorlist.cgi/rockylinux8 -gpgcheck=1 -gpgkey=https://shibboleth.net/downloads/service-provider/RPMS/repomd.xml.key - https://shibboleth.net/downloads/service-provider/RPMS/cantor.repomd.xml.key -enabled=1 diff --git a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf b/conf/vagrant/var/lib/pgsql/data/pg_hba.conf deleted file mode 100644 index e3244686066..00000000000 --- a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf +++ /dev/null @@ -1,74 +0,0 @@ -# PostgreSQL Client Authentication Configuration File -# =================================================== -# -# Refer to the "Client Authentication" section in the -# PostgreSQL documentation for a complete description -# of this file. A short synopsis follows. -# -# This file controls: which hosts are allowed to connect, how clients -# are authenticated, which PostgreSQL user names they can use, which -# databases they can access. Records take one of these forms: -# -# local DATABASE USER METHOD [OPTIONS] -# host DATABASE USER CIDR-ADDRESS METHOD [OPTIONS] -# hostssl DATABASE USER CIDR-ADDRESS METHOD [OPTIONS] -# hostnossl DATABASE USER CIDR-ADDRESS METHOD [OPTIONS] -# -# (The uppercase items must be replaced by actual values.) -# -# The first field is the connection type: "local" is a Unix-domain socket, -# "host" is either a plain or SSL-encrypted TCP/IP socket, "hostssl" is an -# SSL-encrypted TCP/IP socket, and "hostnossl" is a plain TCP/IP socket. -# -# DATABASE can be "all", "sameuser", "samerole", a database name, or -# a comma-separated list thereof. -# -# USER can be "all", a user name, a group name prefixed with "+", or -# a comma-separated list thereof. In both the DATABASE and USER fields -# you can also write a file name prefixed with "@" to include names from -# a separate file. -# -# CIDR-ADDRESS specifies the set of hosts the record matches. 
-# It is made up of an IP address and a CIDR mask that is an integer -# (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that specifies -# the number of significant bits in the mask. Alternatively, you can write -# an IP address and netmask in separate columns to specify the set of hosts. -# -# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi", "krb5", -# "ident", "pam", "ldap" or "cert". Note that "password" sends passwords -# in clear text; "md5" is preferred since it sends encrypted passwords. -# -# OPTIONS are a set of options for the authentication in the format -# NAME=VALUE. The available options depend on the different authentication -# methods - refer to the "Client Authentication" section in the documentation -# for a list of which options are available for which authentication methods. -# -# Database and user names containing spaces, commas, quotes and other special -# characters must be quoted. Quoting one of the keywords "all", "sameuser" or -# "samerole" makes the name lose its special character, and just match a -# database or username with that name. -# -# This file is read on server startup and when the postmaster receives -# a SIGHUP signal. If you edit the file on a running system, you have -# to SIGHUP the postmaster for the changes to take effect. You can use -# "pg_ctl reload" to do that. - -# Put your actual configuration here -# ---------------------------------- -# -# If you want to allow non-local connections, you need to add more -# "host" records. In that case you will also need to make PostgreSQL listen -# on a non-local interface via the listen_addresses configuration parameter, -# or via the -i or -h command line switches. -# - - - -# TYPE DATABASE USER CIDR-ADDRESS METHOD - -# "local" is for Unix domain socket connections only -local all all trust -# IPv4 local connections: -host all all 127.0.0.1/32 trust -# IPv6 local connections: -host all all ::1/128 trust diff --git a/conf/vagrant/var/www/dataverse/error-documents/503.html b/conf/vagrant/var/www/dataverse/error-documents/503.html deleted file mode 100644 index 95a7dea4107..00000000000 --- a/conf/vagrant/var/www/dataverse/error-documents/503.html +++ /dev/null @@ -1 +0,0 @@ -

Custom "site is unavailable" 503 page.

diff --git a/doc/release-notes/1249-collapse_dataverse_description.md b/doc/release-notes/1249-collapse_dataverse_description.md deleted file mode 100644 index 8fe933005de..00000000000 --- a/doc/release-notes/1249-collapse_dataverse_description.md +++ /dev/null @@ -1 +0,0 @@ -Long descriptions for collections are now truncated but can be expanded to read the full description. diff --git a/doc/release-notes/215-handles-upgrade-reminder.md b/doc/release-notes/215-handles-upgrade-reminder.md deleted file mode 100644 index e1cbb8b0d8c..00000000000 --- a/doc/release-notes/215-handles-upgrade-reminder.md +++ /dev/null @@ -1,8 +0,0 @@ -### A reminder for the installations using Handles persistent identifiers (instead of DOIs): remember to upgrade your Handles service installation to a currently supported version. - -Generally, Handles is known to be working reliably even when running older versions that haven't been officially supported in years. We still recommend to check on your service and make sure to upgrade to a supported version (the latest version is 9.3.1, https://www.handle.net/hnr-source/handle-9.3.1-distribution.tar.gz, as of writing this). An older version may be running for you seemingly just fine, but do keep in mind that it may just stop working unexpectedly at any moment, because of some incompatibility introduced in a Java rpm upgrade, or anything similarly unpredictable. - -Handles is also very good about backward incompatibility. Meaning, in most cases you can simply stop the old version, unpack the new version from the distribution and start it on the existing config and database files, and it'll just keep working. However, it is a good idea to keep up with the recommended format upgrades, for the sake of efficiency and to avoid any unexpected surprises, should they finally decide to drop the old database format, for example. The two specific things we recommend: 1) Make sure your service is using a json version of the `siteinfo` bundle (i.e., if you are still using `siteinfo.bin`, convert it to `siteinfo.json` and remove the binary file from the service directory) and 2) Make sure you are using the newer bdbje database format for your handles catalog (i.e., if you still have the files `handles.jdb` and `nas.jdb` in your server directory, convert them to the new format). Follow the simple conversion instructions in the file README.txt in the Handles software distribution. Make sure to stop the service before converting the files and make sure to have a full backup of the existing server directory, just in case). Do not hesitate to contact the Handles support with any questions you may have, as they are very responsive and helpful. 
- - - diff --git a/doc/release-notes/3913-delete-file-endpoint b/doc/release-notes/3913-delete-file-endpoint deleted file mode 100644 index b8b0c2f1ec5..00000000000 --- a/doc/release-notes/3913-delete-file-endpoint +++ /dev/null @@ -1 +0,0 @@ -Support for deleting files using native API: http://preview.guides.gdcc.io/en/develop/api/native-api.html#deleting-files diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md new file mode 100644 index 00000000000..ef2a3b59659 --- /dev/null +++ b/doc/release-notes/5.14-release-notes.md @@ -0,0 +1,404 @@ +# Dataverse Software 5.14 + +(If this note appears truncated on the GitHub Releases page, you can view it in full in the source tree: https://github.com/IQSS/dataverse/blob/master/doc/release-notes/5.14-release-notes.md) + +This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the detailed sections highlighting new features and fixes further down. + +## Installation + +If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.14/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/5.14/installation/intro.html#getting-help) if you need it! + +After your installation has gone into production, you are welcome to add it to our [map of installations](https://dataverse.org/installations) by opening an issue in the [dataverse-installations](https://github.com/IQSS/dataverse-installations) repo. + +## Upgrade Instructions + +0\. These instructions assume that you are upgrading from 5.13. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 5.14. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands we assume that Payara 5 is installed in `/usr/local/payara5`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara5` + +(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell) + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin undeploy dataverse-5.13` + +2\. Stop Payara and remove the generated directory + +- `service payara stop` +- `rm -rf $PAYARA/glassfish/domains/domain1/generated` + +3\. Start Payara + +- `service payara start` + +4\. Deploy this version. + +- `$PAYARA/bin/asadmin deploy dataverse-5.14.war` + +5\. Restart Payara + +- `service payara stop` +- `service payara start` + +6\. Update the Citation metadata block: (the update makes the field Series repeatable) + +- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` + +If you are running an English-only installation, you are finished with the citation block. 
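Either way, you can optionally confirm that the updated block was loaded and that the Series field is now defined. A minimal sanity check, assuming a default localhost deployment with `jq` installed (the jq filter is illustrative):

```
# Fetch the citation block definition and inspect the "series" field
curl -s http://localhost:8080/api/metadatablocks/citation | jq '.data.fields.series'
```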
If your installation supports additional languages, download the updated citation.properties file and place it in the [`dataverse.lang.directory`](https://guides.dataverse.org/en/5.14/installation/config.html#configuring-the-lang-directory); `/home/dataverse/langBundles` is used in the example below. + +- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.properties` +- `cp citation.properties /home/dataverse/langBundles` + +7\. Update Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b). + +7a\. For installations without custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- Replace schema.xml + + - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf` + +- Start Solr instance (usually `service solr start`, depending on Solr/OS) + +7b\. For installations with custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) + +- There are two ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed): +``` + wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/8.11.1/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml +``` +OR, alternatively, you can edit the following lines in your schema.xml by hand as follows (to indicate that series and its components are now `multiValued="true"`): +``` + + + +``` + +- Restart Solr instance (usually `service solr restart`, depending on Solr/OS) + +8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api). + +9\. If your installation did not have :FilePIDsEnabled set, you will need to set it to true to keep file PIDs enabled: + + curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled + +10\. If your installation uses Handles as persistent identifiers (instead of DOIs): remember to upgrade your Handles service installation to a currently supported version. + +Generally, Handles is known to be working reliably even when running older versions that haven't been officially supported in years. We still recommend checking on your service and making sure to upgrade to a supported version (the latest version is 9.3.1, https://www.handle.net/hnr-source/handle-9.3.1-distribution.tar.gz, as of this writing). An older version may be running for you seemingly just fine, but do keep in mind that it may just stop working unexpectedly at any moment, because of some incompatibility introduced in a Java rpm upgrade, or anything similarly unpredictable. + +Handles is also very good about backward compatibility.
Meaning, in most cases you can simply stop the old version, unpack the new version from the distribution and start it on the existing config and database files, and it'll just keep working. However, it is a good idea to keep up with the recommended format upgrades, for the sake of efficiency and to avoid any unexpected surprises, should they finally decide to drop the old database format, for example. The two specific things we recommend: 1) Make sure your service is using a json version of the `siteinfo` bundle (i.e., if you are still using `siteinfo.bin`, convert it to `siteinfo.json` and remove the binary file from the service directory) and 2) Make sure you are using the newer bdbje database format for your handles catalog (i.e., if you still have the files `handles.jdb` and `nas.jdb` in your server directory, convert them to the new format). Follow the simple conversion instructions in the file README.txt in the Handles software distribution. Make sure to stop the service before converting the files and make sure to have a full backup of the existing server directory, just in case. Do not hesitate to contact the Handles support with any questions you may have, as they are very responsive and helpful. + +## New JVM Options and MicroProfile Config Options + +The following PID provider options are now available. See the section "Changes to PID Provider JVM Settings" below for more information. + +- `dataverse.pid.datacite.mds-api-url` +- `dataverse.pid.datacite.rest-api-url` +- `dataverse.pid.datacite.username` +- `dataverse.pid.datacite.password` +- `dataverse.pid.handlenet.key.path` +- `dataverse.pid.handlenet.key.passphrase` +- `dataverse.pid.handlenet.index` +- `dataverse.pid.permalink.base-url` +- `dataverse.pid.ezid.api-url` +- `dataverse.pid.ezid.username` +- `dataverse.pid.ezid.password` + +The following MicroProfile Config options have been added as part of [Signposting](https://signposting.org/) support. See the section "Signposting for Dataverse" below for details. + +- `dataverse.signposting.level1-author-limit` +- `dataverse.signposting.level1-item-limit` + +The following JVM options are described in the "Creating datasets with incomplete metadata through API" section below. + +- `dataverse.api.allow-incomplete-metadata` +- `dataverse.ui.show-validity-filter` +- `dataverse.ui.allow-review-for-incomplete` + +The following JVM/MicroProfile setting is for External Exporters. See "Mechanism Added for Adding External Exporters" below. + +- `dataverse.spi.export.directory` + +The following JVM/MicroProfile settings are for handling of support emails. See "Contact Email Improvements" below. + +- `dataverse.mail.support-email` +- `dataverse.mail.cc-support-on-contact-emails` + +The following JVM/MicroProfile setting is for extracting a geospatial bounding box even if S3 direct upload is enabled. + +- `dataverse.netcdf.geo-extract-s3-direct-upload` + +## Backward Incompatibilities + +The following list of potential backward incompatibilities references the sections of the "Detailed Release Highlights..." portion of the document further below where the corresponding changes are explained in detail. + +### Using the new External Exporters framework + +Care should be taken when replacing Dataverse's internal metadata export formats as third party code, including other third party Exporters, may depend on the contents of those export formats. 
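One way to refresh the cached exports of existing datasets after such a swap is the batch reExport call on the admin API; a minimal sketch, assuming a default localhost deployment:

```
# Re-export all published datasets in all formats (may take a while on large installations)
curl http://localhost:8080/api/admin/metadata/reExportAll
```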
When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command (as sketched above) for the metadata exports of existing datasets to be updated. + +See "Mechanism Added for Adding External Exporters". + +### Publishing via API + +When publishing a dataset via API, it now mirrors the UI behavior by requiring that the dataset has either a standard license configured, or has valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without such **will fail with an error message**. + +See "Handling of license information fixed in the API" for guidance on how to ensure that datasets created or updated via native API have a license configured. + + + +## Detailed Release Highlights, New Features and Use Case Scenarios + +### For Dataverse developers, support for running Dataverse in Docker (experimental) + +Developers can experiment with running Dataverse in Docker. (PR #9439) + +This is an image developers build locally (or can pull from Docker Hub). It is not meant for production use! + +To provide a complete container-based local development environment, developers can deploy a Dataverse container from +the new image in addition to other containers for necessary dependencies: +https://guides.dataverse.org/en/5.14/container/dev-usage.html + +Please note that with this emerging solution we will sunset older tooling like `docker-aio` and `docker-dcm`. +We envision more testing possibilities in the future, to be discussed as part of the +[Dataverse Containerization Working Group](https://ct.gdcc.io). There is no sunsetting roadmap yet, but you have been warned. +If there is some specific feature of these tools you would like to be kept, please [reach out](https://ct.gdcc.io). + +### Indexing performance improved + +This release brings noticeable performance improvements, especially for large datasets containing thousands of files. +Uploading files one by one to a dataset is much faster now, allowing thousands of files to be uploaded in an acceptable timeframe. Not only file uploads but all edit operations on datasets containing many files have gotten faster. +Performance tweaks include indexing of the datasets in the background and optimizations that reduce the number of indexing operations needed. Furthermore, updates to the dataset no longer wait for ingesting to finish. Ingesting was already running in the background, but it took a lock, preventing dataset updates and degrading performance for datasets containing many files. (PR #9558) + +### For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled. + +This is enabled via the new setting `:MDCStartDate` that specifies the cutoff date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to any MDC numbers recorded since then. (PR #6543) + +### Changes to PID Provider JVM Settings + +In preparation for a future feature to use multiple PID providers at the same time, all JVM settings for PID providers +have been enabled to be configured using MicroProfile Config. At the same time, they were renamed to match the name +of the provider to be configured. + +Please watch your log files for deprecation warnings. Your old settings will be picked up, but you should migrate +to the new names to avoid unnecessary log clutter and get prepared for more future changes.
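For example, with Payara's `asadmin` you might replace a deprecated DataCite option with its new name as follows (the username value is a placeholder; `delete-jvm-options` must be passed the exact option string that was originally set):

```
export PAYARA=/usr/local/payara5
# Remove the deprecated JVM option (must match exactly, including the value)
$PAYARA/bin/asadmin delete-jvm-options '-Ddoi.username=YOUR_USERNAME'
# Re-create it under the new MicroProfile-style name
$PAYARA/bin/asadmin create-jvm-options '-Ddataverse.pid.datacite.username=YOUR_USERNAME'
```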
An example deprecation message +looks like this: + +``` +[#|2023-03-31T16:55:27.992+0000|WARNING|Payara 5.2022.5|edu.harvard.iq.dataverse.settings.source.AliasConfigSource|_ThreadID=30;_ThreadName=RunLevelControllerThread-1680281704925;_TimeMillis=1680281727992;_LevelValue=900;| + Detected deprecated config option doi.username in use. Please update your config to use dataverse.pid.datacite.username.|#] +``` + +Here is a list of the new settings: + +- dataverse.pid.datacite.mds-api-url +- dataverse.pid.datacite.rest-api-url +- dataverse.pid.datacite.username +- dataverse.pid.datacite.password +- dataverse.pid.handlenet.key.path +- dataverse.pid.handlenet.key.passphrase +- dataverse.pid.handlenet.index +- dataverse.pid.permalink.base-url +- dataverse.pid.ezid.api-url +- dataverse.pid.ezid.username +- dataverse.pid.ezid.password + +See also https://guides.dataverse.org/en/5.14/installation/config.html#persistent-identifiers-and-publishing-datasets (multiple PRs: #8823 #8828) + +### Signposting for Dataverse + +This release adds [Signposting](https://signposting.org) support to Dataverse to improve machine discoverability of datasets and files. (PR #8424) + +The following MicroProfile Config options are now available (these can be treated as JVM options): + +- dataverse.signposting.level1-author-limit +- dataverse.signposting.level1-item-limit + +Signposting is described in more detail in a new page in the Admin Guide on discoverability: https://guides.dataverse.org/en/5.14/admin/discoverability.html + +### Permalinks support + +Dataverse now optionally supports PermaLinks, a type of persistent identifier that does not involve a global registry service. PermaLinks are appropriate for Intranet deployment and catalog use cases. (PR #8674) + + +### Creating datasets with incomplete metadata through API + +It is now possible to create a dataset with some nominally mandatory metadata fields left unpopulated. For details on the use case that led to this feature, see issue #8822 and PR #8940. + +The create dataset API call (POST to /api/dataverses/#dataverseId/datasets) is extended with the "doNotValidate" parameter. However, in order to be able to create a dataset with incomplete metadata, the Solr configuration must be updated first with the new "schema.xml" file (do not forget to run the metadata fields update script when you use custom metadata). Reindexing is optional, but recommended. Also, even when this feature is not used, it is recommended to update the Solr configuration and reindex the metadata. Finally, this new feature can be activated with the "dataverse.api.allow-incomplete-metadata" JVM option. + +You can also enable a valid/incomplete metadata filter in the "My Data" page using the "dataverse.ui.show-validity-filter" JVM option. By default, this filter is not shown. When you wish to use this filter, you must reindex the datasets first, otherwise datasets with valid metadata will not be shown in the results. + +It is not possible to publish datasets with incomplete metadata. By default, you also cannot send such datasets for review. If you wish to enable sending for review of datasets with incomplete metadata, turn on the "dataverse.ui.allow-review-for-incomplete" JVM option. + +In order to customize the wording and add translations to the UI sections extended by this feature, you can edit the "Bundle.properties" file and the localized versions of that file.
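Putting the API pieces together, a dataset-creation call that skips validation might look like the sketch below (server URL, token, collection alias, and JSON file are placeholders, and it is assumed here that "doNotValidate" is passed as a query parameter):

```
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=http://localhost:8080
# Create a dataset from a JSON file with some mandatory fields left empty
curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
  "$SERVER_URL/api/dataverses/root/datasets?doNotValidate=true" \
  --upload-file dataset-incomplete.json
```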
The Bundle.properties keys used by this feature are: +- incomplete +- valid +- dataset.message.incomplete.warning +- mydataFragment.validity +- dataverses.api.create.dataset.error.mustIncludeAuthorName + +### Registering PIDs (DOIs or Handles) for files in select collections + +It is now possible to configure registering PIDs for files in individual collections. + +For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See the [:FilePIDsEnabled](https://guides.dataverse.org/en/5.14/installation/config.html#filepidsenabled) section of the Configuration guide for details. (PR #9614) + +### Mechanism Added for Adding External Exporters + +It is now possible for third parties to develop and share code to provide new metadata export formats for Dataverse. Export formats can be made available via the Dataverse UI and API or configured for use in Harvesting. Dataverse now provides developers with a separate dataverse-spi JAR file that contains the Java interfaces and classes required to create a new metadata Exporter. Once a new Exporter has been created and packaged as a JAR file, administrators can use it by specifying a local directory for third party Exporters, dropping the Exporter JAR there, and restarting Payara. This mechanism also allows new Exporters to replace any of Dataverse's existing metadata export formats. (PR #9175). See also https://guides.dataverse.org/en/5.14/developers/metadataexport.html + +#### Backward Incompatibilities + +Care should be taken when replacing Dataverse's internal metadata export formats as third party code, including other third party Exporters, may depend on the contents of those export formats. When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command for the metadata exports of existing datasets to be updated. + +#### New JVM/MicroProfile Settings + +dataverse.spi.export.directory - specifies a directory, readable by the Dataverse server. Any Exporter JAR files placed in this directory will be read by Dataverse and used to add/replace the specified metadata format. + +### Contact Email Improvements + +Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected/admin feedback API has been added. (PR #9186) See https://guides.dataverse.org/en/5.14/api/native-api.html#send-feedback-to-contact-s + +#### New JVM/MicroProfile Settings + +dataverse.mail.support-email - allows a separate email, distinct from the :SystemEmail, to be used as the "to" address in emails from the contact form / feedback API. +dataverse.mail.cc-support-on-contact-emails - include the support email address as a CC: entry when contact/feedback emails are sent to the contacts for a collection, dataset, or datafile. + +### Support for Grouping Dataset Files by Folder and Category Tag + +Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category, as sketched below.
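For instance, an administrator could turn on category ordering via the settings API (the tag list is illustrative; the individual settings are described below):

```
# Show Documentation files first, then Data, then Code
curl -X PUT -d 'Documentation,Data,Code' http://localhost:8080/api/admin/settings/:CategoryOrder
# Let users toggle the ordering on/off in the dataset view
curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:AllowUserManagementOfOrder
```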
The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter, which is used to specify which tags appear first (e.g. to put Documentation files before Data or Code files, etc.). These Group-By options work with the existing sort options, i.e. sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. :AllowUserManagementOfOrder can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view. (PR #9204) + +#### New Settings + +:CategoryOrder - a comma separated list of Category/Tag names defining the order in which files with those tags should be displayed. The setting can include custom tag names along with the pre-defined defaults (Documentation, Data, and Code, which can be overridden by the ::FileCategories setting.) +:OrderByFolder - defaults to true - whether to group files in the same folder together +:AllowUserManagementOfOrder - default false - allow users to toggle ordering on/off in the dataset display + +### Metadata field Series now repeatable + +This enhancement allows depositors to define multiple instances of the metadata field Series in the Citation Metadata block. + +Data contained in a dataset may belong to multiple series. Making the field repeatable makes it possible to reflect this fact in the dataset metadata. (PR #9256) + +### Guides in PDF Format + +An experimental version of the guides in PDF format is available at (PR #9474) + +Advice for anyone who wants to help improve the PDF is available at https://guides.dataverse.org/en/5.14/developers/documentation.html#pdf-version-of-the-guides + +### Datasets API extended + +The following APIs have been added: (PR #9592) + +- `/api/datasets/summaryFieldNames` +- `/api/datasets/privateUrlDatasetVersion/{privateUrlToken}` +- `/api/datasets/privateUrlDatasetVersion/{privateUrlToken}/citation` +- `/api/datasets/{datasetId}/versions/{version}/citation` + +### Extra fields included in the JSON metadata + +The following fields are now available in the native JSON output: + +- `alternativePersistentId` +- `publicationDate` +- `citationDate` + +(PR #9657) + + +### Files downloaded from Binder are now in their original format. + +For example, data.dta (a Stata file) will be downloaded instead of data.tab (the archival version Dataverse creates as part of a successful ingest). (PR #9483) + +This should make it easier to write code to reproduce results as the dataset authors and subsequent researchers are likely operating on the original file format rather than the format that Dataverse creates. + +For details, see #9374. + +### Handling of license information fixed in the API + +(PR #9568) + +When publishing a dataset via API, it now requires the dataset to either have a standard license configured, or have valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without such **will fail with an error message**. This introduces a backward incompatibility, and if you have scripts that automatically create, update and publish datasets, this last step may start failing. Unfortunately, there were some problems with the datasets APIs that made it difficult to manage licenses, so an API user was likely to end up with a dataset missing either of the above.
In this release we have addressed it by making the following fixes: + +We fixed the incompatibility between the format in which license information was *exported* in json, and the format the create and update APIs were expecting it for *import* (https://github.com/IQSS/dataverse/issues/9155). This means that the following json format can now be imported: +``` +"license": { + "name": "CC0 1.0", + "uri": "http://creativecommons.org/publicdomain/zero/1.0" +} +``` +However, for the sake of backward compatibility the old format +``` +"license" : "CC0 1.0" +``` +will be accepted as well. + +We have added the default license (CC0) to the model json file that we provide and recommend to use as the model in the Native API Guide (https://github.com/IQSS/dataverse/issues/9364). + +And we have corrected the misleading language in the same guide where we used to recommend to users that they select, edit and re-import only the `.metadataBlocks` fragment of the json metadata representing the latest version. There are in fact other useful pieces of information that need to be preserved in the update (such as the `"license"` section above). So the recommended way of creating base json for updates via the API is to select *everything but* the `"files"` section, with (for example) the following `jq` command: + +``` +jq '.data | del(.files)' +``` + +Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/5.14/api/native-api.html#update-metadata-for-a-dataset) section of our Native Api guide for more information. + + +### New External Tool Type and Implementation + +With this release a new experimental external tool type has been added to the Dataverse Software. The tool type is "query" and its first implementation is an experimental tool named [Ask the Data](https://github.com/IQSS/askdataverse) which allows users to ask natural language queries of tabular files in Dataverse. More information is available in the External Tools section of the guides. (PR #9737) See https://guides.dataverse.org/en/5.14/admin/external-tools.html#file-level-query-tools + +### Default Value for File PIDs registration has changed + +The default for whether PIDs are registered for files or not is now false. + +Installations where file PIDs were enabled by default will have to add the :FilePIDsEnabled = true setting to maintain the existing functionality. + +See Step 9 of the upgrade instructions: + + If your installation did not have :FilePIDsEnabled set, you will need to set it to true to keep file PIDs enabled: + + curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled + + +It is now possible to allow File PIDs to be enabled/disabled per collection. See the [:AllowEnablingFilePIDsPerCollection](https://guides.dataverse.org/en/latest/installation/config.html#allowenablingfilepidspercollection) section of the Configuration guide for details. + +For example, registration of PIDs for files can now be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. + + +### Changes and fixes in this release not already mentioned above include: + +- An endpoint for deleting a file has been added to the native API: https://guides.dataverse.org/en/5.14/api/native-api.html#deleting-files (PR #9383) +- A date column has been added to the restricted file access request overview, indicating when the earliest request by that user was made. 
An issue was also fixed where the request list was not updated when a request was approved or rejected. (PR #9257) +- Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.Org JSON-LD metadata export if a reExportAll() is done. (PR #9102) +- It is now possible to write external vocabulary scripts that target a single child field in a metadata block. Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Organization Registry (ROR) for the Author Affiliation Field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application of these scripts to other fields, and the development of other scripts targeting child fields, are now possible. (PR #9402) +- Dataverse now supports requiring a secret key to add or edit metadata in specified "system" metadata blocks. Changing the metadata in such system metadata blocks is not allowed without the key and is currently only allowed via API. (PR #9388) +- An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. (#9541) See https://guides.dataverse.org/en/5.14/user/dataset-management.html#geospatial-bounding-box +- A file previewer called H5Web is now available for exploring and visualizing NetCDF and HDF5 files. (PR #9600) See https://guides.dataverse.org/en/5.14/user/dataset-management.html#h5web-previewer +- Two file previewers for GeoTIFF and Shapefiles are now available for visualizing GeoTIFF image files and zipped Shapefiles on a map. See https://github.com/gdcc/dataverse-previewers +- A new alternative to set up the Dataverse dependencies for the development environment through Docker Compose. (PR #9417) +- A new alternative, explained in the documentation, to build the Sphinx guides through a Docker container. (PR #9417) +- A container has been added called "configbaker" that configures Dataverse while running in containers. This allows developers to spin up Dataverse with a single command. (PR #9574) +- Direct upload via the Dataverse UI will now support any algorithm configured via the `:FileFixityChecksumAlgorithm` setting. External apps using the direct upload API can now query Dataverse to discover which algorithm should be used. Sites that have been using an algorithm other than MD5 and direct upload and/or dvwebloader may want to use the `/api/admin/updateHashValues` call (see https://guides.dataverse.org/en/5.14/installation/config.html?highlight=updatehashvalues#filefixitychecksumalgorithm) to replace any MD5 hashes on existing files. (PR #9482) +- The OAI_ORE metadata export (and hence the archival Bag for a dataset) now includes information about file embargoes. (PR #9698) +- DatasetFieldType attribute "displayFormat" is now returned by the API. (PR #9668) +- An API named "MyData" has been available for years but is newly documented. It is used to get a list of the objects (datasets, collections or datafiles) that an authenticated user can modify. (PR #9596) +- A Go client library for Dataverse APIs is now available.
See https://guides.dataverse.org/en/5.14/api/client-libraries.html +- A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production! For more information, see https://guides.dataverse.org/en/5.14/installation/config.html#feature-flags +- A feature flag called "api-bearer-auth" has been added. This allows OIDC useraccounts to send authenticated API requests using Bearer Tokens. Note: This feature is limited to OIDC! For more information, see https://guides.dataverse.org/en/5.14/installation/config.html#feature-flags (PR #9591) + + +## Complete List of Changes + +For the complete list of code changes in this release, see the [5.14 milestone](https://github.com/IQSS/dataverse/milestone/108?closed=1) on GitHub. diff --git a/doc/release-notes/5042-add-mydata-doc-api.md b/doc/release-notes/5042-add-mydata-doc-api.md deleted file mode 100644 index 5a77e266725..00000000000 --- a/doc/release-notes/5042-add-mydata-doc-api.md +++ /dev/null @@ -1,2 +0,0 @@ -An API named 'MyData' is supported by Dataverse. A documentation has been added describing its use (PR #9596) -This API is used to get a list of only the objects (datasets, dataverses or datafiles) that an authenticated user can modify. diff --git a/doc/release-notes/6542-mdc-legacy-counts.md b/doc/release-notes/6542-mdc-legacy-counts.md deleted file mode 100644 index 1f439747a08..00000000000 --- a/doc/release-notes/6542-mdc-legacy-counts.md +++ /dev/null @@ -1,3 +0,0 @@ -###For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled. - -This is enabled via the new setting `:MDCStartDate` that specifies the cutoff date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to the any MDC numbers recorded since then. diff --git a/doc/release-notes/7000-pidproviders.md b/doc/release-notes/7000-pidproviders.md deleted file mode 100644 index 04f7c25078d..00000000000 --- a/doc/release-notes/7000-pidproviders.md +++ /dev/null @@ -1,30 +0,0 @@ -# Changes to PID Provider JVM Settings - -In prepration for a future feature to use multiple PID providers at the same time, all JVM settings for PID providers -have been enabled to be configured using MicroProfile Config. In the same go, they were renamed to match the name -of the provider to be configured. - -Please watch your log files for deprecation warnings. Your old settings will be picked up, but you should migrate -to the new names to avoid unnecessary log clutter and get prepared for more future changes. An example message -looks like this: - -``` -[#|2023-03-31T16:55:27.992+0000|WARNING|Payara 5.2022.5|edu.harvard.iq.dataverse.settings.source.AliasConfigSource|_ThreadID=30;_ThreadName=RunLevelControllerThread-1680281704925;_TimeMillis=1680281727992;_LevelValue=900;| - Detected deprecated config option doi.username in use. 
Please update your config to use dataverse.pid.datacite.username.|#] -``` - -Here is a list of the new settings: - -- dataverse.pid.datacite.mds-api-url -- dataverse.pid.datacite.rest-api-url -- dataverse.pid.datacite.username -- dataverse.pid.datacite.password -- dataverse.pid.handlenet.key.path -- dataverse.pid.handlenet.key.passphrase -- dataverse.pid.handlenet.index -- dataverse.pid.permalink.base-url -- dataverse.pid.ezid.api-url -- dataverse.pid.ezid.username -- dataverse.pid.ezid.password - -See also http://preview.guides.gdcc.io/en/develop/installation/config.html#persistent-identifiers-and-publishing-datasets diff --git a/doc/release-notes/8092-timestamp-of-data-access-request.md b/doc/release-notes/8092-timestamp-of-data-access-request.md deleted file mode 100644 index 506b78018a9..00000000000 --- a/doc/release-notes/8092-timestamp-of-data-access-request.md +++ /dev/null @@ -1,3 +0,0 @@ -A date column has been added to the restricted file access request overview, indicating when the earliest request by that user was made. - -An issue was fixed where where the request list was not updated when a request was approved or rejected. diff --git a/doc/release-notes/8094-java-17.md b/doc/release-notes/8094-java-17.md new file mode 100644 index 00000000000..f3c81145465 --- /dev/null +++ b/doc/release-notes/8094-java-17.md @@ -0,0 +1 @@ +Java 17 or higher is now required. diff --git a/doc/release-notes/8305-payara6-ee10-v3.md b/doc/release-notes/8305-payara6-ee10-v3.md new file mode 100644 index 00000000000..94369e0211f --- /dev/null +++ b/doc/release-notes/8305-payara6-ee10-v3.md @@ -0,0 +1,5 @@ +Payara has been updated from version 5 to 6. + +Developers, you are encouraged to upgrade to Payara 6 immediately. + +Sysadmins, instructions on how to upgrade production installations will be written as part of https://github.com/IQSS/dataverse/issues/9340 diff --git a/doc/release-notes/8424-signposting.md b/doc/release-notes/8424-signposting.md deleted file mode 100644 index 6994ba92cb7..00000000000 --- a/doc/release-notes/8424-signposting.md +++ /dev/null @@ -1,8 +0,0 @@ -# Signposting for Dataverse - -This release adds [Signposting](https://signposting.org/) support to Dataverse to improve machine discoverability of datasets and files. - -The following MicroProfile Config options are now available (these can be treated as JVM options): - -- dataverse.signposting.level1-author-limit -- dataverse.signposting.level1-item-limit diff --git a/doc/release-notes/8674-permalinks.md b/doc/release-notes/8674-permalinks.md deleted file mode 100644 index 5d47b378c91..00000000000 --- a/doc/release-notes/8674-permalinks.md +++ /dev/null @@ -1 +0,0 @@ -Dataverse now optionally supports PermaLinks, a type of persistent identifier that does not involve a global registry service. PermaLinks are appropriate for Intranet deployment and catalog use cases. diff --git a/doc/release-notes/8822-incomplete-datasets-via-api.md b/doc/release-notes/8822-incomplete-datasets-via-api.md deleted file mode 100644 index dd0d98e2d3f..00000000000 --- a/doc/release-notes/8822-incomplete-datasets-via-api.md +++ /dev/null @@ -1,14 +0,0 @@ -### Creating datasets with incomplete metadata through API - -The create dataset API call (POST to /api/dataverses/#dataverseId/datasets) is extended with the "doNotValidate" parameter. 
However, in order to be able to create a dataset with incomplete metadata, the solr configuration must be updated first with the new "schema.xml" file (do not forget to run the metadata fields update script when you use custom metadata). Reindexing is optional, but recommended. Also, even when this feature is not used, it is recommended to update the solar configuration and reindex the metadata. Finally, this new feature can be activated with the "dataverse.api.allow-incomplete-metadata" JVM option. - -You can also enable a valid/incomplete metadata filter in the "My Data" page using the "dataverse.ui.show-validity-filter" JVM option. By default, this filter is not shown. When you wish to use this filter, you must reindex the datasets first, otherwise datasets with valid metadata will not be shown in the results. - -It is not possible to publish datasets with incomplete or incomplete metadata. By default, you also cannot send such datasets for review. If you wish to enable sending for review of datasets with incomplete metadata, turn on the "dataverse.ui.allow-review-for-incomplete" JVM option. - -In order to customize the wording and add translations to the UI sections extended by this feature, you can edit the "Bundle.properties" file and the localized versions of that file. The property keys used by this feature are: -- incomplete -- valid -- dataset.message.incomplete.warning -- mydataFragment.validity -- dataverses.api.create.dataset.error.mustIncludeAuthorName diff --git a/doc/release-notes/8889-2-filepids-in-collections-changes.md b/doc/release-notes/8889-2-filepids-in-collections-changes.md deleted file mode 100644 index a3693d9008e..00000000000 --- a/doc/release-notes/8889-2-filepids-in-collections-changes.md +++ /dev/null @@ -1,15 +0,0 @@ -The default for whether PIDs are registered for files or not is now false. - -Installations where file PIDs were enabled by default will have to add the :FilePIDsEnabled = true setting to maintain the existing functionality. - -Add step to install: - - If your installation did not have :FilePIDsEnabled set, you will need to set it to true to keep file PIDs enabled: - - curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled - - - -It is now possible to allow File PIDs to be enabled/disabled per collection. See the [:AllowEnablingFilePIDsPerCollection](https://guides.dataverse.org/en/latest/installation/config.html#allowenablingfilepidspercollection) section of the Configuration guide for details. - -For example, registration of PIDs for files can now be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. \ No newline at end of file diff --git a/doc/release-notes/8889-filepids-in-collections.md b/doc/release-notes/8889-filepids-in-collections.md deleted file mode 100644 index bc8aeea3b56..00000000000 --- a/doc/release-notes/8889-filepids-in-collections.md +++ /dev/null @@ -1,3 +0,0 @@ -It is now possible to configure registering PIDs for files in individual collections. - -For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See the [:FilePIDsEnabled](https://guides.dataverse.org/en/latest/installation/config.html#filepidsenabled) section of the Configuration guide for details. 
\ No newline at end of file diff --git a/doc/release-notes/9063-session-api-auth.md b/doc/release-notes/9063-session-api-auth.md deleted file mode 100644 index a972486e481..00000000000 --- a/doc/release-notes/9063-session-api-auth.md +++ /dev/null @@ -1 +0,0 @@ -A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags diff --git a/doc/release-notes/9100-schema.org-updates.md b/doc/release-notes/9100-schema.org-updates.md deleted file mode 100644 index c76a0cc38e5..00000000000 --- a/doc/release-notes/9100-schema.org-updates.md +++ /dev/null @@ -1,3 +0,0 @@ -Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.Org JSON-LD metadata export if a reExportAll() is done. - -The 5.14 release notes should include the standard instructions for doing a reExportAll after updating the code. \ No newline at end of file diff --git a/doc/release-notes/9148-license-via-api.md b/doc/release-notes/9148-license-via-api.md deleted file mode 100644 index 4c27af941e3..00000000000 --- a/doc/release-notes/9148-license-via-api.md +++ /dev/null @@ -1,28 +0,0 @@ -# License management via API - -See https://github.com/IQSS/dataverse/issues/9148. - -When publishing a dataset via API, it now requires the dataset to either have a standard license configured, or have valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without such **will fail with an error message**. This introduces a backward incompatibility, and if you have scripts that automatically create, update and publish datasets, this last step may start failing. Because, unfortunately, there were some problems with the datasets APIs that made it difficult to manage licenses, so an API user was likely to end up with a dataset missing either of the above. In this release we have addressed it by making the following fixes: - -We fixed the incompatibility between the format in which license information was *exported* in json, and the format the create and update APIs were expecting it for *import* (https://github.com/IQSS/dataverse/issues/9155). This means that the following json format can now be imported: -``` -"license": { - "name": "CC0 1.0", - "uri": "http://creativecommons.org/publicdomain/zero/1.0" -} -``` -However, for the sake of backward compatibility the old format -``` -"license" : "CC0 1.0" -``` -will be accepted as well. - -We have added the default license (CC0) to the model json file that we provide and recommend to use as the model in the Native API Guide (https://github.com/IQSS/dataverse/issues/9364). - -And we have corrected the misleading language in the same guide where we used to recommend to users that they select, edit and re-import only the `.metadataBlocks` fragment of the json metadata representing the latest version. There are in fact other useful pieces of information that need to be preserved in the update (such as the `"license"` section above). 
So the recommended way of creating base json for updates via the API is to select *everything but* the `"files"` section, with (for example) the following `jq` command: - -``` -jq '.data | del(.files)' -``` - -Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/latest/api/native-api.html#update-metadata-for-a-dataset) section of our Native Api guide for more information. diff --git a/doc/release-notes/9150-improved-external-vocab-supprt.md b/doc/release-notes/9150-improved-external-vocab-supprt.md deleted file mode 100644 index 5ae678e185b..00000000000 --- a/doc/release-notes/9150-improved-external-vocab-supprt.md +++ /dev/null @@ -1 +0,0 @@ -It is now possible to write external vocabulary scripts that target a single child field in a metadata block. Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Orgnaization Registry (ROR) for the Author Affiliation Field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application if these scripts to other fields, and the development of other scripts targetting child fields are now possible. \ No newline at end of file diff --git a/doc/release-notes/9175-external-exporters.md b/doc/release-notes/9175-external-exporters.md deleted file mode 100644 index 79b6358dac5..00000000000 --- a/doc/release-notes/9175-external-exporters.md +++ /dev/null @@ -1,11 +0,0 @@ -## Ability to Create New Exporters - -It is now possible for third parties to develop and share code to provide new metadata export formats for Dataverse. Export formats can be made available via the Dataverse UI and API or configured for use in Harvesting. Dataverse now provides developers with a separate dataverse-spi JAR file that contains the Java interfaces and classes required to create a new metadata Exporter. Once a new Exporter has been created and packaged as a JAR file, administrators can use it by specifying a local directory for third party Exporters, dropping then Exporter JAR there, and restarting Payara. This mechanism also allows new Exporters to replace any of Dataverse's existing metadata export formats. - -## Backward Incompatibilities - -Care should be taken when replacing Dataverse's internal metadata export formats as third party code, including other third party Exporters may depend on the contents of those export formats. When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command for the metadata exports of existing datasets to be updated. - -## New JVM/MicroProfile Settings - -dataverse.spi.export.directory - specifies a directory, readable by the Dataverse server. Any Exporter JAR files placed in this directory will be read by Dataverse and used to add/replace the specified metadata format. \ No newline at end of file diff --git a/doc/release-notes/9185-contact-email-updates.md b/doc/release-notes/9185-contact-email-updates.md deleted file mode 100644 index 3e03461a383..00000000000 --- a/doc/release-notes/9185-contact-email-updates.md +++ /dev/null @@ -1,12 +0,0 @@ -## Contact Email Improvements - -Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. 
When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected feedback API has been added. - -## Backward Incompatibilities - -When there are multiple contacts, the system will now send one email with all of the contacts in the To: header instead of sending one email to each contact (with no indication that others have been notified). - -## New JVM/MicroProfile Settings - -dataverse.mail.support-email - allows a separate email, distinct from the :SystemEmail to be used as the to address in emails from the contact form/ feedback api. -dataverse.mail.cc-support-on-contact-emails - include the support email address as a CC: entry when contact/feedback emails are sent to the contacts for a collection, dataset, or datafile. \ No newline at end of file diff --git a/doc/release-notes/9204-group-by-file-ordering.md b/doc/release-notes/9204-group-by-file-ordering.md deleted file mode 100644 index 97f6b0750fb..00000000000 --- a/doc/release-notes/9204-group-by-file-ordering.md +++ /dev/null @@ -1,9 +0,0 @@ -### Support for Grouping Dataset Files by Folder and Category Tag - -Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category. The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter which is used to specify which tags appear first (e.g. to put Documentation files before Data or Code files, etc.) These Group-By options work with the existing sort options, i.e. sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. :AllowUsersToManageOrdering can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view. - -### New Setting - -:CategoryOrder - a comma separated list of Category/Tag names defining the order in which files with those tags should be displayed. The setting can include custom tag names along with the pre-defined defaults ( Documentation, Data, and Code, which can be overridden by the ::FileCategories setting.) -:OrderByFolder - defaults to true - whether to group files in the same folder together -:AllowUserManagementOfOrder - default false - allow users to toggle ordering on/off in the dataset display \ No newline at end of file diff --git a/doc/release-notes/9229-bearer-api-auth.md b/doc/release-notes/9229-bearer-api-auth.md deleted file mode 100644 index 543b803469b..00000000000 --- a/doc/release-notes/9229-bearer-api-auth.md +++ /dev/null @@ -1 +0,0 @@ -A feature flag called "api-bearer-auth" has been added. This allows OIDC useraccounts to send authenticated API requests using Bearer Tokens. Note: This feature is limited to OIDC! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags diff --git a/doc/release-notes/9256-series.md b/doc/release-notes/9256-series.md deleted file mode 100644 index 97419af4616..00000000000 --- a/doc/release-notes/9256-series.md +++ /dev/null @@ -1,42 +0,0 @@ -## Metadata field Series now repeatable -This enhancement allows depositors to define multiple instances of the metadata field Series in the Citation Metadata block. 
- -## Major Use Cases and Infrastructure Enhancements -* Data contained in a dataset may belong to multiple series. Making the field Series repeatable will make it possible to reflect this fact in the dataset metadata. (Issue #9255, PR #9256) - -### Additional Upgrade Steps - -Update the Citation metadata block: - -wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values" - -## Additional Release Steps - -1. Replace Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b). - -1a. - -For installations without Custom Metadata Blocks: - --stop solr instance (usually service solr stop, depending on solr installation/OS, see the Installation Guide - --replace schema.xml - -cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf - --start solr instance (usually service solr start, depending on solr/OS) - -1b. - -For installations with Custom Metadata Blocks: - --stop solr instance (usually service solr stop, depending on solr installation/OS, see the Installation Guide - -edit the following lines to your schema.xml (to indicate that series and its components are now multiValued='true"): - - - - - --restart solr instance (usually service solr start, depending on solr/OS) \ No newline at end of file diff --git a/doc/release-notes/9260-solr930.md b/doc/release-notes/9260-solr930.md new file mode 100644 index 00000000000..d4d9adca61c --- /dev/null +++ b/doc/release-notes/9260-solr930.md @@ -0,0 +1,59 @@ +Solr has been upgraded to Solr 9. You must install Solr fresh and reindex. You cannot use your old schema.xml because the format has changed. + +Specifically, to install Solr fresh you should follow the instructions for new installations, found at https://guides.dataverse.org/en/9260-solr930/installation/prerequisites.html#installing-solr + +These instructions are copied below and tweaked a bit for an upgrade scenario. + +We assume that you already have a user called "solr" (from the instructions above), added during your initial installation of Solr. + +1. Become the "solr" user and then download and configure Solr. + + ``` + su - solr + cd /usr/local/solr + wget https://archive.apache.org/dist/lucene/solr/9.3.0/solr-9.3.0.tgz + tar xvzf solr-9.3.0.tgz + cd solr-9.3.0 + cp -r server/solr/configsets/_default server/solr/collection1 + ``` + +1. Unzip "dvinstall.zip" from this release. Unzip it into /tmp. Then copy the following files into place. + + ``` + cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf + + cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf + ``` + +1. Tell Solr to create the core "collection1" on startup. + + ``` + echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties + ``` + +1. Update your init script. + + Your init script may be located at `/etc/systemd/system/solr.service`, for example. Update the path to Solr to be `/usr/local/solr/solr-9.3.0`. + +1. Start Solr using your init script and check collection1. + + The collection1 check below should print out fields Dataverse uses like "dsDescription". + + ``` + systemctl start solr.service + curl http://localhost:8983/solr/collection1/schema/fields + ``` + +1. Reindex Solr. 
+ + For details, see https://guides.dataverse.org/en/9260-solr930/admin/solr-search-index.html + + ``` + curl http://localhost:8080/api/admin/index + ``` + +1. If you have custom metadata blocks installed, you must update your Solr schema.xml to include your custom fields. + + For details, please see https://guides.dataverse.org/en/9260-solr930/admin/metadatacustomization.html#updating-the-solr-schema + + At a high level you will be copying custom fields from the output of http://localhost:8080/api/admin/index/solr/schema or using a script to automate this. diff --git a/doc/release-notes/9277-nonstopmode-pdf-guides.md b/doc/release-notes/9277-nonstopmode-pdf-guides.md deleted file mode 100644 index eab456416ba..00000000000 --- a/doc/release-notes/9277-nonstopmode-pdf-guides.md +++ /dev/null @@ -1,3 +0,0 @@ -An experimental version of the guides in PDF format is available at - -Advice for contributors to documentation who want to help improve the PDF is available at http://preview.guides.gdcc.io/en/develop/developers/documentation.html#pdf-version-of-the-guides diff --git a/doc/release-notes/9331-extract-bounding-box.md b/doc/release-notes/9331-extract-bounding-box.md deleted file mode 100644 index dfd2c4cadb7..00000000000 --- a/doc/release-notes/9331-extract-bounding-box.md +++ /dev/null @@ -1,5 +0,0 @@ -An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. - -The following JVM setting has been added: - -- dataverse.netcdf.geo-extract-s3-direct-upload diff --git a/doc/release-notes/9340-payara5to6.md b/doc/release-notes/9340-payara5to6.md new file mode 100644 index 00000000000..68162ef7598 --- /dev/null +++ b/doc/release-notes/9340-payara5to6.md @@ -0,0 +1,132 @@ +## Upgrade from Payara 5 to Payara 6 + +1. Download Payara 6.2023.8 as of this writing: + + `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip` + +1. Unzip it to /usr/local (or your preferred location): + + `sudo unzip payara-6.2023.8.zip -d /usr/local/` + +1. Change ownership of the unzipped Payara to your "service" user ("dataverse" by default): + + `sudo chown -R dataverse /usr/local/payara6` + +1. Undeploy Dataverse (if deployed, using the unprivileged service account. Version 5.14 is assumed in the example below): + + `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` + + `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` + +1. Stop Payara 5: + + `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` + +1. Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml: + + `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig` + + `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt` + + `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml` + + The lines will appear in two sections, examples shown below (but your content will vary). + + Section 1: system properties (under ``) + + ``` + + + + + + ``` + + Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below. 
+ + Section 2: JVM options (under ``, the one under ``, not under ``) + + ``` + -Ddataverse.files.directory=/usr/local/dvn/data + -Ddataverse.files.file.type=file + -Ddataverse.files.file.label=file + -Ddataverse.files.file.directory=/usr/local/dvn/data + -Ddataverse.rserve.host=localhost + -Ddataverse.rserve.port=6311 + -Ddataverse.rserve.user=rserve + -Ddataverse.rserve.password=rserve + -Ddataverse.auth.password-reset-timeout-in-minutes=60 + -Ddataverse.timerServer=true + -Ddataverse.fqdn=dev1.dataverse.org + -Ddataverse.siteUrl=https://dev1.dataverse.org + -Ddataverse.files.storage-driver-id=file + -Ddoi.username=testaccount + -Ddoi.password=notmypassword + -Ddoi.baseurlstring=https://mds.test.datacite.org/ + -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org + ``` + +1. Check the `Xmx` setting in `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`. (The one under ``, where you put the JVM options, not the one under ``.) Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`). + +1. Copy jhove.conf and jhoveConfig.xsd from Payara 5, edit and change payara5 to payara6 + + `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/` + + `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*` + + `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf` + +1. Copy logos from Payara 5 to Payara 6 + + These logos are for collections (dataverses). + + `sudo -u dataverse cp -r /usr/local/payara5/glassfish/domains/domain1/docroot/logos /usr/local/payara6/glassfish/domains/domain1/docroot` + +1. If you are using Make Data Count (MDC), edit :MDCLogPath + + Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well. + +1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable. + +1. Start Payara: + + `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` + +1. Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server: + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession` + +1. Create password aliases for your database, rserve and datacite jvm-options, if you're using them: + + ``` + $ echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt + $ sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt + Enter the value for the aliasname operand> dataverse.db.password + Command create-password-alias executed successfully. + ``` + + You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or Datacite. + +1. Enable workaround for FISH-7722: + + The following workaround is for https://github.com/payara/Payara/issues/6337 + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED` + +1. 
Create the network listener on port 8009 + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector` + +1. Deploy the Dataverse 6.0 warfile: + + `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war` + +1. Check that you get a version number from Dataverse: + + `curl http://localhost:8080/api/info/version` + +1. Perform one final Payara restart to ensure that timers are initialized properly: + + `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain` + + `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` diff --git a/doc/release-notes/9374-binder-orig.md b/doc/release-notes/9374-binder-orig.md deleted file mode 100644 index 5e6ff0e5c4d..00000000000 --- a/doc/release-notes/9374-binder-orig.md +++ /dev/null @@ -1,7 +0,0 @@ -Files downloaded from Binder are now in their original format. - -For example, data.dta (a Stata file) will be downloaded instead of data.tab (the archival version Dataverse creates as part of a successful ingest). - -This should make it easier to write code to reproduce results as the dataset authors and subsequent researchers are likely operating on the original file format rather that the format that Dataverse creates. - -For details, see #9374, , and . diff --git a/doc/release-notes/9387-system_metadata_blocks.md b/doc/release-notes/9387-system_metadata_blocks.md deleted file mode 100644 index 69110d141f8..00000000000 --- a/doc/release-notes/9387-system_metadata_blocks.md +++ /dev/null @@ -1 +0,0 @@ -Dataverse supports requiring a secret key to add or edit metadata in specified 'system' metadata blocks. Changing the metadata in such system metadata blocks is not allowed without the key and is currently only allowed via API. diff --git a/doc/release-notes/9414-containerized_dev_dependencies.md b/doc/release-notes/9414-containerized_dev_dependencies.md deleted file mode 100644 index 4e3680573f9..00000000000 --- a/doc/release-notes/9414-containerized_dev_dependencies.md +++ /dev/null @@ -1,2 +0,0 @@ -New alternative to setup the Dataverse dependencies for the development environment through Docker Compose. -New alternative, explained in the documentation, to build the Sphinx guides through a Docker container. diff --git a/doc/release-notes/9431-checksum-alg-in-direct-uploads.md b/doc/release-notes/9431-checksum-alg-in-direct-uploads.md deleted file mode 100644 index e754686f3f0..00000000000 --- a/doc/release-notes/9431-checksum-alg-in-direct-uploads.md +++ /dev/null @@ -1,4 +0,0 @@ -Direct upload via the Dataverse UI will now support any algorithm configured via the :FileFixityChecksumAlgorithm setting. -External apps using the direct upload API can now query Dataverse to discover which algorithm should be used. - -Sites that have been using an algorithm other than MD5 and direct upload and/or dvwebloader may want to use the /api/admin/updateHashValues call (see https://guides.dataverse.org/en/latest/installation/config.html?highlight=updatehashvalues#filefixitychecksumalgorithm) to replace any MD5 hashes on existing files. diff --git a/doc/release-notes/9434-app-container.md b/doc/release-notes/9434-app-container.md deleted file mode 100644 index b5088bee328..00000000000 --- a/doc/release-notes/9434-app-container.md +++ /dev/null @@ -1,12 +0,0 @@ -Developers can experiment with running Dataverse in Docker: https://preview.guides.gdcc.io/en/develop/container/app-image.html - -This is an image developers build locally. 
It is not meant for production use! - -To provide a complete container-based local development environment, developers can deploy a Dataverse container from -the new image in addition to other containers for the necessary dependencies: -https://preview.guides.gdcc.io/en/develop/container/dev-usage.html - -Please note that with this emerging solution we will sunset older tooling like `docker-aio` and `docker-dcm`. -We envision more testing possibilities in the future, to be discussed as part of the -[Dataverse Containerization Working Group](https://dc.wgs.gdcc.io). There is no roadmap yet, but you have been warned. -If there is some specific feature of these tools you would like to be kept, please [reach out](https://dc.wgs.gdcc.io). diff --git a/doc/release-notes/9480-h5web.md b/doc/release-notes/9480-h5web.md deleted file mode 100644 index 97beff70e4a..00000000000 --- a/doc/release-notes/9480-h5web.md +++ /dev/null @@ -1 +0,0 @@ -A file previewer called H5Web is now available for exploring and visualizing NetCDF and HDF5 files. diff --git a/doc/release-notes/9558-async-indexing.md b/doc/release-notes/9558-async-indexing.md deleted file mode 100644 index a44eac1ff75..00000000000 --- a/doc/release-notes/9558-async-indexing.md +++ /dev/null @@ -1,3 +0,0 @@ -Performance improvements, especially for large datasets containing thousands of files. -Uploading files one by one to the dataset is much faster now, allowing uploading thousands of files in an acceptable timeframe. Not only uploading a file, but all edit operations on datasets containing many files, got faster. -Performance tweaks include indexing of the datasets in the background and optimizations in the amount of the indexing operations needed. Furthermore, updates to the dateset no longer wait for ingesting to finish. Ingesting was already running in the background, but it took a lock, preventing updating the dataset and degrading performance for datasets containing many files. \ No newline at end of file diff --git a/doc/release-notes/9573-configbaker.md b/doc/release-notes/9573-configbaker.md deleted file mode 100644 index bb68134794c..00000000000 --- a/doc/release-notes/9573-configbaker.md +++ /dev/null @@ -1 +0,0 @@ -A container has been added called "configbaker" that configures Dataverse while running in containers. This allows developers to spin up Dataverse with a single command. 
diff --git a/doc/release-notes/9588-datasets-api-extension.md b/doc/release-notes/9588-datasets-api-extension.md deleted file mode 100644 index f4fd6354d47..00000000000 --- a/doc/release-notes/9588-datasets-api-extension.md +++ /dev/null @@ -1,6 +0,0 @@ -The following APIs have been added: - -- /api/datasets/summaryFieldNames -- /api/datasets/privateUrlDatasetVersion/{privateUrlToken} -- /api/datasets/privateUrlDatasetVersion/{privateUrlToken}/citation -- /api/datasets/{datasetId}/versions/{version}/citation diff --git a/doc/release-notes/9656-api-optional-dataset-params.md b/doc/release-notes/9656-api-optional-dataset-params.md deleted file mode 100644 index 5d08f26386a..00000000000 --- a/doc/release-notes/9656-api-optional-dataset-params.md +++ /dev/null @@ -1,5 +0,0 @@ -The following fields are now available in the native JSON output: - -- alternativePersistentId -- publicationDate -- citationDate diff --git a/doc/release-notes/9667-metadatablocks-api-extension.md b/doc/release-notes/9667-metadatablocks-api-extension.md deleted file mode 100644 index 067b0f37303..00000000000 --- a/doc/release-notes/9667-metadatablocks-api-extension.md +++ /dev/null @@ -1 +0,0 @@ -DatasetFieldType attribute "displayFormat", is now returned by the API. diff --git a/doc/release-notes/9691-go-client-library.md b/doc/release-notes/9691-go-client-library.md deleted file mode 100644 index b0b7bae536b..00000000000 --- a/doc/release-notes/9691-go-client-library.md +++ /dev/null @@ -1 +0,0 @@ -A Go client library is now available. See https://preview.guides.gdcc.io/en/develop/api/client-libraries.html diff --git a/doc/release-notes/9698-embargo-info-in-ORE.md b/doc/release-notes/9698-embargo-info-in-ORE.md deleted file mode 100644 index 388be52da62..00000000000 --- a/doc/release-notes/9698-embargo-info-in-ORE.md +++ /dev/null @@ -1 +0,0 @@ -The OAI_ORE metadata export (and hence the archival Bag for a dataset) now includes information about file embargoes diff --git a/doc/release-notes/9704-GeotifShp.md b/doc/release-notes/9704-GeotifShp.md deleted file mode 100644 index 3a663ec3944..00000000000 --- a/doc/release-notes/9704-GeotifShp.md +++ /dev/null @@ -1 +0,0 @@ -Two file previewers for GeoTIFF and Shapefiles are now available for visualizing geotiff image files and zipped Shapefiles on a map. diff --git a/doc/release-notes/9782-juni5-transition.md b/doc/release-notes/9782-juni5-transition.md new file mode 100644 index 00000000000..b7ffcc0de0d --- /dev/null +++ b/doc/release-notes/9782-juni5-transition.md @@ -0,0 +1,7 @@ +# Migrating all test to JUnit 5 +With this release, we transition all of our test cases (see `src/test/`) to use JUnit 5 only. +Moving forward from JUnit 4 will allow writing tests in more concise and easier ways. +The tests themselves have not been altered, but updated to match JUnit 5 ways. +They have not been extended or dropped coverage; this is mostly a preparation of things to come in the future. +If you are writing tests in JUnit 4 in your feature branches, you need to migrate. +The development guides section of testing has been updated as well. diff --git a/doc/release-notes/9812-archiver-warnings.md b/doc/release-notes/9812-archiver-warnings.md new file mode 100644 index 00000000000..716223b3f46 --- /dev/null +++ b/doc/release-notes/9812-archiver-warnings.md @@ -0,0 +1,7 @@ +# Potential Archiver Incompatibilities with Payara6 +The Google Cloud and DuraCloud Archivers (see https://guides.dataverse.org/en/latest/installation/config.html#bagit-export) may not work in v6.0. 
+This is due to their dependence on libraries that include classes in javax.* packages that are no longer available. +If these classes are actually used when the archivers run, the archivers would fail. +As these two archivers require additional setup, they have not been tested in v6.0. +Community members using these archivers or considering their use are encouraged to test them with v6.0 and report any errors and/or provide fixes for them that can be included in future releases. + diff --git a/doc/release-notes/9838-rm-vagrant.md b/doc/release-notes/9838-rm-vagrant.md new file mode 100644 index 00000000000..910f2e0b2f0 --- /dev/null +++ b/doc/release-notes/9838-rm-vagrant.md @@ -0,0 +1 @@ +Vagrant has been removed. See #9838. diff --git a/doc/shib/shib.md b/doc/shib/shib.md index 2c178a93f35..9cff6d827e7 100644 --- a/doc/shib/shib.md +++ b/doc/shib/shib.md @@ -82,11 +82,7 @@ Run `service httpd restart`. ## Update/verify files under /etc/shibboleth -For /etc/shibboleth/shibboleth2.xml use the version from https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/shibboleth2.xml but replace "pdurbin.pagekite.me" with the "shibtest.dataverse.org". - -Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml at /etc/shibboleth/dataverse-idp-metadata.xml - -Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/attribute-map.xml at +Get files from the Installation Guide. After making these changes, run `service shibd restart` and `service httpd restart`. diff --git a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml index 4f338905751..26144544d9e 100644 --- a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml +++ b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml @@ -1,8 +1,8 @@ # currently no other option but to have daily logs and have year-month-day format in the name with # 4-digit year and 2-digit month and day -# /usr/local/payara5/glassfish/domains/domain1/logs/counter_2019-01-11.log +# /usr/local/payara6/glassfish/domains/domain1/logs/counter_2019-01-11.log #log_name_pattern: sample_logs/counter_(yyyy-mm-dd).log -log_name_pattern: /usr/local/payara5/glassfish/domains/domain1/logs/mdc/counter_(yyyy-mm-dd).log +log_name_pattern: /usr/local/payara6/glassfish/domains/domain1/logs/mdc/counter_(yyyy-mm-dd).log # path_types regular expressions allow matching to classify page urls as either an investigation or request # based on specific URL structure for your system. diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 826989643ba..8543300dd2c 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -1,6 +1,7 @@ -Tool Type Scope Description -Data Explorer explore file A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse. -Whole Tale explore dataset A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. 
Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_. -Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. See also :ref:`binder`. -File Previewers explore file A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, GeoTIFF, Shapefile, zip, HDF5, NetCDF, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers -Data Curation Tool configure file A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions. +Tool Type Scope Description +Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." +Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." +Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." 
+Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. diff --git a/scripts/vagrant/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml similarity index 100% rename from scripts/vagrant/counter-processor-config.yaml rename to doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root index 1de94331523..b9ef9960318 100755 --- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root +++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root @@ -4,7 +4,7 @@ set -e -ASADMIN=/usr/local/payara5/bin/asadmin +ASADMIN=/usr/local/payara6/bin/asadmin case "$1" in start) diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service index 7c457e615d8..19bb190e740 100755 --- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service +++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service @@ -3,7 +3,7 @@ # description: Payara App Server set -e -ASADMIN=/usr/local/payara5/bin/asadmin +ASADMIN=/usr/local/payara6/bin/asadmin APP_SERVER_USER=dataverse case "$1" in diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr index 7ca04cdff3f..9cf8902eb14 100755 --- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr +++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr @@ -5,7 +5,7 @@ # chkconfig: 35 92 08 # description: Starts and stops Apache Solr -SOLR_DIR="/usr/local/solr/solr-8.11.1" +SOLR_DIR="/usr/local/solr/solr-9.3.0" SOLR_COMMAND="bin/solr" SOLR_ARGS="-m 1g -j jetty.host=127.0.0.1" SOLR_USER=solr diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service index c8c82f6d6b2..c8efcb9c6f9 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service +++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service @@ -4,9 +4,9 @@ After = syslog.target network.target [Service] Type = forking -ExecStart = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar start-domain -ExecStop = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar stop-domain -ExecReload = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar restart-domain +ExecStart = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar start-domain +ExecStop = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar stop-domain +ExecReload = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar restart-domain User=dataverse LimitNOFILE=32768 Environment="LANG=en_US.UTF-8" diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service 
b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service index d89ee108377..0b8a8528490 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service +++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service @@ -5,9 +5,9 @@ After = syslog.target network.target remote-fs.target nss-lookup.target [Service] User = solr Type = forking -WorkingDirectory = /usr/local/solr/solr-8.11.1 -ExecStart = /usr/local/solr/solr-8.11.1/bin/solr start -m 1g -j "jetty.host=127.0.0.1" -ExecStop = /usr/local/solr/solr-8.11.1/bin/solr stop +WorkingDirectory = /usr/local/solr/solr-9.3.0 +ExecStart = /usr/local/solr/solr-9.3.0/bin/solr start -m 1g -j "jetty.host=127.0.0.1" +ExecStop = /usr/local/solr/solr-9.3.0/bin/solr stop LimitNOFILE=65000 LimitNPROC=65000 Restart=on-failure diff --git a/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties b/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties deleted file mode 100644 index 4054c794452..00000000000 --- a/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties +++ /dev/null @@ -1,166 +0,0 @@ -# -# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. -# -# Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved. -# -# The contents of this file are subject to the terms of either the GNU -# General Public License Version 2 only ("GPL") or the Common Development -# and Distribution License("CDDL") (collectively, the "License"). You -# may not use this file except in compliance with the License. You can -# obtain a copy of the License at -# https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html -# or packager/legal/LICENSE.txt. See the License for the specific -# language governing permissions and limitations under the License. -# -# When distributing the software, include this License Header Notice in each -# file and include the License file at packager/legal/LICENSE.txt. -# -# GPL Classpath Exception: -# Oracle designates this particular file as subject to the "Classpath" -# exception as provided by Oracle in the GPL Version 2 section of the License -# file that accompanied this code. -# -# Modifications: -# If applicable, add the following below the License Header, with the fields -# enclosed by brackets [] replaced by your own identifying information: -# "Portions Copyright [year] [name of copyright owner]" -# -# Contributor(s): -# If you wish your version of this file to be governed by only the CDDL or -# only the GPL Version 2, indicate your decision by adding "[Contributor] -# elects to include this software in this distribution under the [CDDL or GPL -# Version 2] license." If you don't indicate a single choice of license, a -# recipient has the option to distribute your version of this file under -# either the CDDL, the GPL Version 2 or to extend the choice of license to -# its licensees as provided above. However, if you add GPL Version 2 code -# and therefore, elected the GPL Version 2 license, then the option applies -# only if the new code is made subject to such option by the copyright -# holder. 
-# -# Portions Copyright [2016-2021] [Payara Foundation and/or its affiliates] - -#GlassFish logging.properties list -#Update June 13 2012 - -#All attributes details -handlers=java.util.logging.ConsoleHandler -handlerServices=com.sun.enterprise.server.logging.GFFileHandler,com.sun.enterprise.server.logging.SyslogHandler -java.util.logging.ConsoleHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter -java.util.logging.FileHandler.count=1 -java.util.logging.FileHandler.formatter=java.util.logging.XMLFormatter -java.util.logging.FileHandler.limit=50000 -java.util.logging.FileHandler.pattern=%h/java%u.log -com.sun.enterprise.server.logging.GFFileHandler.compressOnRotation=false -com.sun.enterprise.server.logging.GFFileHandler.excludeFields= -com.sun.enterprise.server.logging.GFFileHandler.file=${com.sun.aas.instanceRoot}/logs/server.log -com.sun.enterprise.server.logging.GFFileHandler.flushFrequency=1 -com.sun.enterprise.server.logging.GFFileHandler.formatter=com.sun.enterprise.server.logging.ODLLogFormatter -com.sun.enterprise.server.logging.GFFileHandler.level=ALL -com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true -com.sun.enterprise.server.logging.GFFileHandler.logtoConsole=false -com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true -com.sun.enterprise.server.logging.GFFileHandler.maxHistoryFiles=0 -com.sun.enterprise.server.logging.GFFileHandler.multiLineMode=true -com.sun.enterprise.server.logging.GFFileHandler.retainErrorsStasticsForHours=0 -com.sun.enterprise.server.logging.GFFileHandler.rotationLimitInBytes=2000000 -com.sun.enterprise.server.logging.GFFileHandler.rotationOnDateChange=false -com.sun.enterprise.server.logging.GFFileHandler.rotationTimelimitInMinutes=0 -com.sun.enterprise.server.logging.SyslogHandler.level=ALL -com.sun.enterprise.server.logging.SyslogHandler.useSystemLogging=false -log4j.logger.org.hibernate.validator.util.Version=warn -com.sun.enterprise.server.logging.UniformLogFormatter.ansiColor=true - -#Payara Notification logging properties -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.compressOnRotation=false -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.file=${com.sun.aas.instanceRoot}/logs/notification.log -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.formatter=com.sun.enterprise.server.logging.ODLLogFormatter -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.logtoFile=true -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.maxHistoryFiles=0 -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationLimitInBytes=2000000 -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationOnDateChange=false -fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationTimelimitInMinutes=0 -fish.payara.deprecated.jsonlogformatter.underscoreprefix=false - -#All log level details - -.level=INFO -ShoalLogger.level=CONFIG -com.hazelcast.level=WARNING -java.util.logging.ConsoleHandler.level=FINEST -javax.enterprise.resource.corba.level=INFO -javax.enterprise.resource.javamail.level=INFO -javax.enterprise.resource.jdo.level=INFO -javax.enterprise.resource.jms.level=INFO -javax.enterprise.resource.jta.level=INFO -javax.enterprise.resource.resourceadapter.level=INFO -javax.enterprise.resource.sqltrace.level=FINE -javax.enterprise.resource.webcontainer.jsf.application.level=INFO -javax.enterprise.resource.webcontainer.jsf.config.level=INFO 
-javax.enterprise.resource.webcontainer.jsf.context.level=INFO -javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO -javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO -javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO -javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO -javax.enterprise.resource.webcontainer.jsf.resource.level=INFO -javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO -javax.enterprise.resource.webcontainer.jsf.timing.level=INFO -javax.enterprise.system.container.cmp.level=INFO -javax.enterprise.system.container.ejb.level=INFO -javax.enterprise.system.container.ejb.mdb.level=INFO -javax.enterprise.system.container.web.level=INFO -javax.enterprise.system.core.classloading.level=INFO -javax.enterprise.system.core.config.level=INFO -javax.enterprise.system.core.level=INFO -javax.enterprise.system.core.security.level=INFO -javax.enterprise.system.core.selfmanagement.level=INFO -javax.enterprise.system.core.transaction.level=INFO -javax.enterprise.system.level=INFO -javax.enterprise.system.ssl.security.level=INFO -javax.enterprise.system.tools.admin.level=INFO -javax.enterprise.system.tools.backup.level=INFO -javax.enterprise.system.tools.deployment.common.level=WARNING -javax.enterprise.system.tools.deployment.dol.level=WARNING -javax.enterprise.system.tools.deployment.level=INFO -javax.enterprise.system.util.level=INFO -javax.enterprise.system.webservices.registry.level=INFO -javax.enterprise.system.webservices.rpc.level=INFO -javax.enterprise.system.webservices.saaj.level=INFO -javax.level=INFO -javax.mail.level=INFO -javax.org.glassfish.persistence.level=INFO -org.apache.catalina.level=INFO -org.apache.coyote.level=INFO -org.apache.jasper.level=INFO -org.eclipse.persistence.session.level=INFO -org.glassfish.admingui.level=INFO -org.glassfish.naming.level=INFO -org.jvnet.hk2.osgiadapter.level=INFO - -javax.enterprise.resource.corba.level=INFO -javax.enterprise.resource.jta.level=INFO -javax.enterprise.system.webservices.saaj.level=INFO -javax.enterprise.system.container.ejb.level=INFO -javax.enterprise.system.container.ejb.mdb.level=INFO -javax.enterprise.resource.javamail.level=INFO -javax.enterprise.system.webservices.rpc.level=INFO -javax.enterprise.system.container.web.level=INFO -javax.enterprise.resource.jms.level=INFO -javax.enterprise.system.webservices.registry.level=INFO -javax.enterprise.resource.webcontainer.jsf.application.level=INFO -javax.enterprise.resource.webcontainer.jsf.resource.level=INFO -javax.enterprise.resource.webcontainer.jsf.config.level=INFO -javax.enterprise.resource.webcontainer.jsf.context.level=INFO -javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO -javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO -javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO -javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO -javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO -javax.enterprise.resource.webcontainer.jsf.timing.level=INFO -javax.org.glassfish.persistence.level=INFO -javax.enterprise.system.tools.backup.level=INFO -javax.mail.level=INFO -org.glassfish.admingui.level=INFO -org.glassfish.naming.level=INFO -org.eclipse.persistence.session.level=INFO -javax.enterprise.system.tools.deployment.dol.level=WARNING -javax.enterprise.system.tools.deployment.common.level=WARNING diff --git a/doc/sphinx-guides/source/_static/util/clear_timer.sh b/doc/sphinx-guides/source/_static/util/clear_timer.sh index 1d9966e4e07..641b2695084 100755 --- 
a/doc/sphinx-guides/source/_static/util/clear_timer.sh +++ b/doc/sphinx-guides/source/_static/util/clear_timer.sh @@ -8,7 +8,7 @@ # if you'd like to avoid that. # directory where Payara is installed -PAYARA_DIR=/usr/local/payara5 +PAYARA_DIR=/usr/local/payara6 # directory within Payara (defaults) DV_DIR=${PAYARA_DIR}/glassfish/domains/domain1 diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh index a12439d9cf8..674972b18f2 100644 --- a/doc/sphinx-guides/source/_static/util/counter_daily.sh +++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh @@ -1,7 +1,7 @@ #! /bin/bash COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04" -MDC_LOG_DIRECTORY="/usr/local/payara5/glassfish/domains/domain1/logs/mdc" +MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc" # counter_daily.sh diff --git a/doc/sphinx-guides/source/admin/external-tools.rst b/doc/sphinx-guides/source/admin/external-tools.rst index ad6181a867a..67075e986bb 100644 --- a/doc/sphinx-guides/source/admin/external-tools.rst +++ b/doc/sphinx-guides/source/admin/external-tools.rst @@ -92,7 +92,15 @@ File Level Preview Tools File level preview tools allow the user to see a preview of the file contents without having to download it. -When a file has a preview available, a preview icon will appear next to that file in the file listing on the dataset page. On the file page itself, the preview will appear in a Preview tab either immediately or once a guestbook has been filled in or terms, if any, have been agreed to. +When a file has a preview available, a preview icon will appear next to that file in the file listing on the dataset page. On the file page itself, the preview will appear in a Preview tab (renamed File Tools, if multiple tools are available) either immediately or once a guestbook has been filled in or terms, if any, have been agreed to. + +File Level Query Tools +++++++++++++++++++++++++ + +File level query tools allow the user to ask questions (e.g. natural language queries) of a data table's contents without having to download it. + +When a file has a query tool available, a query icon will appear next to that file in the file listing on the dataset page. On the file page itself, the query tool will appear in a Query tab (renamed File Tools, if multiple tools are available) either immediately or once a guestbook has been filled in or terms, if any, have been agreed to. + File Level Configure Tools ++++++++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst index da922459f46..59fc4dc2c64 100644 --- a/doc/sphinx-guides/source/admin/harvestclients.rst +++ b/doc/sphinx-guides/source/admin/harvestclients.rst @@ -35,8 +35,8 @@ For example: .. code-block:: bash - sudo touch /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916 - sudo chown dataverse /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916 + sudo touch /usr/local/payara6/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916 + sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916 Note: If the application server is stopped and restarted, any running harvesting jobs will be killed but may remain marked as in progress in the database. We thus recommend using the mechanism here to stop ongoing harvests prior to a server restart. 
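A minimal helper sketch for this mechanism, assuming the default Payara location and reusing the hypothetical client nickname and PID from the example above:

.. code-block:: bash

   #!/bin/bash
   # Sketch: drop a stopharvest flag file to stop a running harvest job.
   # Usage (hypothetical values): ./stopharvest.sh bigarchive 70916
   NICKNAME="$1"
   PID="$2"
   LOG_DIR=/usr/local/payara6/glassfish/domains/domain1/logs
   sudo touch "${LOG_DIR}/stopharvest_${NICKNAME}.${PID}"
   sudo chown dataverse "${LOG_DIR}/stopharvest_${NICKNAME}.${PID}"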
@@ -44,6 +44,6 @@ Note: If the application server is stopped and restarted, any running harvesting What if a Run Fails? ~~~~~~~~~~~~~~~~~~~~ -Each harvesting client run logs a separate file per run to the app server's default logging directory (``/usr/local/payara5/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong. +Each harvesting client run logs to a separate file in the app server's default logging directory (``/usr/local/payara6/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong. Note that you'll want to run a minimum of Dataverse Software 4.6, optimally 4.18 or beyond, for the best OAI-PMH interoperability. diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index ec6ff9a685b..fe32af6649a 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -72,10 +72,9 @@ Enable or Disable Display of Make Data Count Metrics By default, when MDC logging is enabled (when ``:MDCLogPath`` is set), your Dataverse installation will display MDC metrics instead of it's internal (legacy) metrics. You can avoid this (e.g. to collect MDC metrics for some period of time before starting to display them) by setting ``:DisplayMDCMetrics`` to false. +The following discussion assumes ``:MDCLogPath`` has been set to ``/usr/local/payara6/glassfish/domains/domain1/logs/mdc``. You can also decide to display MDC metrics along with Dataverse's traditional download counts from the time before MDC was enabled. To do this, set the :ref:`:MDCStartDate` to when you started MDC logging. -The following discussion assumes ``:MDCLogPath`` has been set to ``/usr/local/payara5/glassfish/domains/domain1/logs/mdc`` - Configure Counter Processor ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -104,7 +103,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single * If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.: - * ``cd /usr/local/payara5/glassfish/domains/domain1/logs/mdc`` + * ``cd /usr/local/payara6/glassfish/domains/domain1/logs/mdc`` * ``touch counter_2019-02-01.log`` diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index cac051ddb59..4f737bd730b 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -413,13 +413,10 @@ Setting Up a Dev Environment for Testing You have several options for setting up a dev environment for testing metadata block changes: -- Vagrant: See the :doc:`/developers/tools` section of the Developer Guide. -- docker-aio: See https://github.com/IQSS/dataverse/tree/develop/conf/docker-aio +- Docker: See :doc:`/container/index`. - AWS deployment: See the :doc:`/developers/deployment` section of the Developer Guide. - Full dev environment: See the :doc:`/developers/dev-environment` section of the Developer Guide. -To get a clean environment in Vagrant, you'll be running ``vagrant destroy``. In Docker, you'll use ``docker rm``.
For a full dev environment or AWS installation, you might find ``rebuild`` and related scripts at ``scripts/deploy/phoenix.dataverse.org`` useful. - Editing TSV files ~~~~~~~~~~~~~~~~~ @@ -516,7 +513,7 @@ the Solr schema configuration, including any enabled metadata schemas: ``curl "http://localhost:8080/api/admin/index/solr/schema"`` -You can use :download:`update-fields.sh <../../../../conf/solr/8.11.1/update-fields.sh>` to easily add these to the +You can use :download:`update-fields.sh <../../../../conf/solr/9.3.0/update-fields.sh>` to easily add these to the Solr schema you installed for your Dataverse installation. The script needs a target XML file containing your Solr schema. (See the :doc:`/installation/prerequisites/` section of @@ -540,7 +537,7 @@ from some place else than your Dataverse installation). Please note that reconfigurations of your Solr index might require a re-index. Usually release notes indicate a necessary re-index, but for your custom metadata you will need to keep track on your own. -Please note also that if you are going to make a pull request updating ``conf/solr/8.11.1/schema.xml`` with fields you have +Please note also that if you are going to make a pull request updating ``conf/solr/9.3.0/schema.xml`` with fields you have added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you don't care about) to create a complete list of fields. (This might change in the future.) diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst index 9f085ba90cd..acbdcaae17e 100644 --- a/doc/sphinx-guides/source/admin/troubleshooting.rst +++ b/doc/sphinx-guides/source/admin/troubleshooting.rst @@ -53,15 +53,13 @@ Long-Running Ingest Jobs Have Exhausted System Resources Ingest is both CPU- and memory-intensive, and depending on your system resources and the size and format of tabular data files uploaded, may render your Dataverse installation unresponsive or nearly inoperable. It is possible to cancel these jobs by purging the ingest queue. -``/usr/local/payara5/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username. +``/usr/local/payara6/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username. -``/usr/local/payara5/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation. +``/usr/local/payara6/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation. Finally, list destinations to verify that the purge was successful: -``/usr/local/payara5/mq/bin/imqcmd -u admin list dst`` - -If you are still running Glassfish, substitute glassfish4 for payara5 above. If you have installed your Dataverse installation in some other location, adjust the above paths accordingly. +``/usr/local/payara6/mq/bin/imqcmd -u admin list dst`` .. _troubleshooting-payara: @@ -73,7 +71,7 @@ Payara Finding the Payara Log File ~~~~~~~~~~~~~~~~~~~~~~~~~~~ -``/usr/local/payara5/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Payara in the default directory). Hopefully an error message has been logged. 
If there's a stack trace, it may be of interest to developers, especially they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of the Dataverse Software you have installed. +``/usr/local/payara6/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Payara in the default directory). Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially if they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of the Dataverse Software you have installed. .. _increase-payara-logging: diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index eec9944338f..05affaf975e 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -39,7 +39,7 @@ How External Tools Are Presented to Users An external tool can appear in your Dataverse installation in a variety of ways: -- as an explore, preview, or configure option for a file +- as an explore, preview, query, or configure option for a file - as an explore option for a dataset - as an embedded preview on the file landing page @@ -92,7 +92,7 @@ Terminology scope Whether the external tool appears and operates at the **file** level or the **dataset** level. Note that a file level tool much also specify the type of file it operates on (see "contentType" below). - types Whether the external tool is an **explore** tool, a **preview** tool, a **configure** tool or any combination of these (multiple types are supported for a single tool). Configure tools require an API token because they make changes to data files (files within datasets). Configure tools are currently not supported at the dataset level. The older "type" keyword that allows you to pass a single type as a string is deprecated but still supported. + types Whether the external tool is an **explore** tool, a **preview** tool, a **query** tool, a **configure** tool or any combination of these (multiple types are supported for a single tool). Configure tools require an API token because they make changes to data files (files within datasets). Configure tools are currently not supported at the dataset level. The older "type" keyword that allows you to pass a single type as a string is deprecated but still supported. toolUrl The **base URL** of the tool before query parameters are added. diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index 544f0921bd7..a6f6c259a25 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -11,7 +11,7 @@ Servers You Can Test With Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. -If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`.
+If you would rather have full control over your own test server, deployments to AWS, Docker, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. Getting an API Token -------------------- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2bbcf108cad..4d9466703e4 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -56,13 +56,13 @@ Next you need to figure out the alias or database id of the "parent" Dataverse c export SERVER_URL=https://demo.dataverse.org export PARENT=root - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$PARENT --upload-file dataverse-complete.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT" --upload-file dataverse-complete.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root --upload-file dataverse-complete.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root" --upload-file dataverse-complete.json You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created Dataverse collection. @@ -80,13 +80,13 @@ To view a published Dataverse collection: export SERVER_URL=https://demo.dataverse.org export ID=root - curl $SERVER_URL/api/dataverses/$ID + curl "$SERVER_URL/api/dataverses/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/dataverses/root + curl "https://demo.dataverse.org/api/dataverses/root" To view an unpublished Dataverse collection: @@ -96,13 +96,13 @@ To view an unpublished Dataverse collection: export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root" Delete a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -117,13 +117,13 @@ Deletes the Dataverse collection whose database ID or alias is given: export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/dataverses/$ID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/dataverses/root + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root" .. 
_show-contents-of-a-dataverse-api: @@ -140,13 +140,13 @@ Show Contents of a Dataverse Collection export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/contents + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/contents" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/contents + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/contents" Report the data (file) size of a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -159,13 +159,13 @@ Shows the combined size in bytes of all the files uploaded into the Dataverse co export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/storagesize + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storagesize" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/storagesize + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/storagesize" The size of published and unpublished files will be summed both in the Dataverse collection specified and beneath all its sub-collections, recursively. By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by the Dataverse installation - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. @@ -181,13 +181,13 @@ All the roles defined directly in the Dataverse collection identified by ``id``: export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/roles + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/roles" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/roles + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/roles" List Facets Configured for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -200,13 +200,13 @@ List Facets Configured for a Dataverse Collection export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/facets + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/facets" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/facets + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/facets" Set Facets for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -219,13 +219,13 @@ Assign search facets for a given Dataverse collection identified by ``id``: export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/dataverses/$ID/facets --upload-file dataverse-facets.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/facets" --upload-file dataverse-facets.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/facets --upload-file dataverse-facets.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/facets" --upload-file dataverse-facets.json Where :download:`dataverse-facets.json <../_static/api/dataverse-facets.json>` contains a JSON encoded list of metadata keys (e.g. ``["authorName","authorAffiliation"]``). @@ -242,13 +242,13 @@ List Metadata Block Facets Configured for a Dataverse Collection export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablockfacets + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablockfacets" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablockfacets + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets" Set Metadata Block Facets for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -265,13 +265,13 @@ To clear the metadata blocks set by a parent collection, submit an empty array ( export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/metadatablockfacets --upload-file metadata-block-facets.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" "$SERVER_URL/api/dataverses/$ID/metadatablockfacets" --upload-file metadata-block-facets.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/metadatablockfacets --upload-file metadata-block-facets.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets" --upload-file metadata-block-facets.json Where :download:`metadata-block-facets.json <../_static/api/metadata-block-facets.json>` contains a JSON encoded list of metadata block names (e.g. ``["socialscience","geospatial"]``). This endpoint supports an empty list (e.g. 
``[]``) @@ -290,13 +290,13 @@ When updating the root to false, it will clear any metadata block facets from th export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/metadatablockfacets/isRoot -d 'true' + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" "$SERVER_URL/api/dataverses/$ID/metadatablockfacets/isRoot" -d 'true' The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/metadatablockfacets/isRoot -d 'true' + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets/isRoot" -d 'true' .. _create-role-in-collection: @@ -311,13 +311,13 @@ Creates a new role under Dataverse collection ``id``. Needs a json file with the export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json For ``roles.json`` see :ref:`json-representation-of-a-role` @@ -336,13 +336,13 @@ List all the role assignments at the given Dataverse collection: export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/assignments + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/assignments" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/assignments + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/assignments" Assign Default Role to User Creating a Dataset in a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -356,13 +356,13 @@ Assign a default role to a user creating a dataset in a Dataverse collection ``i export ID=root export ROLE_ALIAS=curator - curl -H X-Dataverse-key:$API_TOKEN -X PUT $SERVER_URL/api/dataverses/$ID/defaultContributorRole/$ROLE_ALIAS + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$ID/defaultContributorRole/$ROLE_ALIAS" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/defaultContributorRole/curator + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/root/defaultContributorRole/curator" Note: You may use "none" as the ``ROLE_ALIAS``. 
This will prevent a user who creates a dataset from having any role on that dataset. It is not recommended for Dataverse collections with human contributors. @@ -379,13 +379,13 @@ Assigns a new role, based on the POSTed JSON: export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" $SERVER_URL/api/dataverses/$ID/assignments --upload-file role.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/dataverses/$ID/assignments" --upload-file role.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-Type: application/json" https://demo.dataverse.org/api/dataverses/root/assignments --upload-file role.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "https://demo.dataverse.org/api/dataverses/root/assignments" --upload-file role.json POSTed JSON example (the content of ``role.json`` file):: @@ -408,13 +408,13 @@ Delete the assignment whose id is ``$id``: export ID=root export ASSIGNMENT_ID=6 - curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/dataverses/$ID/assignments/$ASSIGNMENT_ID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID/assignments/$ASSIGNMENT_ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/dataverses/root/assignments/6 + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/assignments/6" List Metadata Blocks Defined on a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -431,13 +431,13 @@ Please note that an API token is only required if the Dataverse collection has n export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablocks + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablocks + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks" Define Metadata Blocks for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -456,13 +456,13 @@ The metadata blocks that are available with a default Dataverse installation are export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/metadatablocks -H \"Content-type:application/json\" --upload-file define-metadatablocks.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/metadatablocks" -H "Content-type:application/json" --upload-file define-metadatablocks.json The fully expanded example above (without environment variables) looks like this: ..
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json https://demo.dataverse.org/api/dataverses/root/metadatablocks + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json "https://demo.dataverse.org/api/dataverses/root/metadatablocks" Determine if a Dataverse Collection Inherits Its Metadata Blocks from Its Parent ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -475,13 +475,13 @@ Get whether the Dataverse collection is a metadata block root, or does it uses i export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot" Configure a Dataverse Collection to Inherit Its Metadata Blocks from Its Parent ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -495,13 +495,13 @@ values are ``true`` and ``false`` (both are valid JSON expressions): export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X PUT $SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot" .. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. @@ -568,7 +568,7 @@ The following is an example HTTP call with deactivated validation: export PARENT=root export SERVER_URL=https://demo.dataverse.org - curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets?doNotValidate=true" --upload-file dataset-incomplete.json -H 'Content-type:application/json' + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets?doNotValidate=true" --upload-file dataset-incomplete.json -H 'Content-type:application/json' **Note:** You may learn about an instance's support for deposition of incomplete datasets via :ref:`info-incomplete-metadata`. 
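Following the pattern used for the other examples in this guide, the fully expanded form of the ``doNotValidate`` call above (without environment variables, and with a placeholder API token) would look like this:

.. code-block:: bash

   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/datasets?doNotValidate=true" --upload-file dataset-incomplete.json -H 'Content-type:application/json'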
@@ -589,7 +589,7 @@ Next you need to figure out the alias or database id of the "parent" Dataverse c export PARENT=root export SERVER_URL=https://demo.dataverse.org - curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets" --upload-file dataset-finch1.json -H 'Content-type:application/json' + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets" --upload-file dataset-finch1.json -H 'Content-type:application/json' The fully expanded example above (without the environment variables) looks like this: @@ -617,13 +617,13 @@ To import a dataset with an existing persistent identifier (PID), the dataset's export DATAVERSE_ID=root export PERSISTENT_IDENTIFIER=doi:ZZ7/MOSEISLEYDB94 - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:import?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file dataset.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:import?pid=$PERSISTENT_IDENTIFIER&release=yes" --upload-file dataset.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets/:import?pid=doi:ZZ7/MOSEISLEYDB94&release=yes --upload-file dataset.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/datasets/:import?pid=doi:ZZ7/MOSEISLEYDB94&release=yes" --upload-file dataset.json The ``pid`` parameter holds a persistent identifier (such as a DOI or Handle). The import will fail if no PID is provided, or if the provided PID fails validation. @@ -658,13 +658,13 @@ To import a dataset with an existing persistent identifier (PID), you have to pr export DATAVERSE_ID=root export PERSISTENT_IDENTIFIER=doi:ZZ7/MOSEISLEYDB94 - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:importddi?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file ddi_dataset.xml + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:importddi?pid=$PERSISTENT_IDENTIFIER&release=yes" --upload-file ddi_dataset.xml The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets/:importddi?pid=doi:ZZ7/MOSEISLEYDB94&release=yes --upload-file ddi_dataset.xml + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/datasets/:importddi?pid=doi:ZZ7/MOSEISLEYDB94&release=yes" --upload-file ddi_dataset.xml The optional ``pid`` parameter holds a persistent identifier (such as a DOI or Handle). The import will fail if the provided PID fails validation. @@ -694,13 +694,13 @@ In order to publish a Dataverse collection, you must know either its "alias" (wh export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/actions/:publish + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/actions/:publish" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/actions/:publish + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/actions/:publish" You should expect a 200 ("OK") response and JSON output. @@ -723,13 +723,13 @@ In order to retrieve the Guestbook Responses for a Dataverse collection, you mus export GUESTBOOK_ID=1 export FILENAME=myResponses.csv - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/guestbookResponses?guestbookId=$GUESTBOOK_ID -o $FILENAME + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/guestbookResponses?guestbookId=$GUESTBOOK_ID" -o "$FILENAME" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/guestbookResponses?guestbookId=1 -o myResponses.csv + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/guestbookResponses?guestbookId=1" -o myResponses.csv .. _collection-attributes-api: @@ -774,13 +774,13 @@ Example: Getting the dataset whose DOI is *10.5072/FK2/J8SJZB*: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:$API_TOKEN" https://demo.dataverse.org/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key:$API_TOKEN" "https://demo.dataverse.org/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB" Getting its draft version: @@ -789,13 +789,13 @@ Getting its draft version: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB - curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:$API_TOKEN" https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key:$API_TOKEN" "https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB" |CORS| Show the dataset whose database id is passed: @@ -804,13 +804,13 @@ The fully expanded example above (without environment variables) looks like this export SERVER_URL=https://demo.dataverse.org export ID=24 - curl $SERVER_URL/api/datasets/$ID + curl "$SERVER_URL/api/datasets/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/24 + curl "https://demo.dataverse.org/api/datasets/24" The dataset id can be extracted from the response retrieved from the API which uses the persistent identifier (``/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER``).
@@ -824,13 +824,13 @@ List Versions of a Dataset export SERVER_URL=https://demo.dataverse.org export ID=24 - curl $SERVER_URL/api/datasets/$ID/versions + curl "$SERVER_URL/api/datasets/$ID/versions" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/24/versions + curl "https://demo.dataverse.org/api/datasets/24/versions" It returns a list of versions with their metadata, and file list: @@ -895,13 +895,13 @@ Get Version of a Dataset export ID=24 export VERSION=1.0 - curl $SERVER_URL/api/datasets/$ID/versions/$VERSION + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/24/versions/1.0 + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0" .. _export-dataset-metadata-api: @@ -918,13 +918,13 @@ See also :ref:`batch-exports-through-the-api` and the note below: export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export METADATA_FORMAT=ddi - curl $SERVER_URL/api/datasets/export?exporter=$METADATA_FORMAT&persistentId=PERSISTENT_IDENTIFIER + curl "$SERVER_URL/api/datasets/export?exporter=$METADATA_FORMAT&persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB + curl "https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB" .. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide. @@ -950,13 +950,13 @@ List Files in a Dataset export ID=24 export VERSION=1.0 - curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/files + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/24/versions/1.0/files + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files" View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -965,9 +965,9 @@ View Dataset Files and Folders as a Directory Index .. code-block:: bash - curl $SERVER_URL/api/datasets/${ID}/dirindex/ + curl "$SERVER_URL/api/datasets/${ID}/dirindex/" # or - curl ${SERVER_URL}/api/datasets/:persistentId/dirindex?persistentId=doi:${PERSISTENT_ID} + curl "${SERVER_URL}/api/datasets/:persistentId/dirindex?persistentId=doi:${PERSISTENT_ID}" Optional parameters: @@ -1065,13 +1065,13 @@ List All Metadata Blocks for a Dataset export ID=24 export VERSION=1.0 - curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata" The fully expanded example above (without environment variables) looks like this: ..
code-block:: bash - curl https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata" List Single Metadata Block for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1085,13 +1085,13 @@ List Single Metadata Block for a Dataset export VERSION=1.0 export METADATA_BLOCK=citation - curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata/$METADATA_BLOCK + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata/$METADATA_BLOCK" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata/citation + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata/citation" Update Metadata For a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1108,13 +1108,13 @@ For example, after making your edits, your JSON file might look like :download:` export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-update-metadata.json + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-update-metadata.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-update-metadata.json + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/BCCP9Z" --upload-file dataset-update-metadata.json Note that in the example JSON file above, there are only two JSON objects with the ``license`` and ``metadataBlocks`` keys respectively. When you download a representation of your latest dataset version in JSON format, these objects will be nested inside another object called ``data`` in the API response. Note that there may be more objects in there, in addition to the ``license`` and ``metadataBlocks`` that you may need to preserve and re-import as well. Basically, you need everything in there except for the ``files``. This can be achived by downloading the metadata and selecting the sections you need with a JSON tool such as ``jq``, like this: @@ -1124,13 +1124,13 @@ Note that in the example JSON file above, there are only two JSON objects with t export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z - curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PERSISTENT_IDENTIFIER | jq '.data | del(.files)' > dataset-update-metadata.json + curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PERSISTENT_IDENTIFIER" | jq '.data | del(.files)' > dataset-update-metadata.json The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/datasets/:persistentId/versions/:latest?persistentId=doi:10.5072/FK2/BCCP9Z | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/:persistentId/versions/:latest?persistentId=doi:10.5072/FK2/BCCP9Z" | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json Now you can edit the JSON produced by the command above with a text editor of your choice. For example, with ``vi`` in the example below. @@ -1154,13 +1154,13 @@ Alternatively to replacing an entire dataset version with its JSON representatio export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata/?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-add-metadata.json + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata/?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-add-metadata.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-add-metadata.json + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z" --upload-file dataset-add-metadata.json You may also replace existing metadata in dataset fields with the following (adding the parameter replace=true): @@ -1170,13 +1170,13 @@ You may also replace existing metadata in dataset fields with the following (add export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true --upload-file dataset-update-metadata.json + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true" --upload-file dataset-update-metadata.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true --upload-file dataset-update-metadata.json + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true" --upload-file dataset-update-metadata.json For these edits your JSON file need only include those dataset fields which you would like to edit. 
A sample JSON file may be downloaded here: :download:`dataset-edit-metadata-sample.json <../_static/api/dataset-edit-metadata-sample.json>` @@ -1191,13 +1191,13 @@ You may delete some of the metadata of a dataset version by supplying a file wit export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/deleteMetadata/?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-delete-author-metadata.json + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/deleteMetadata/?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-delete-author-metadata.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/deleteMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-delete-author-metadata.json + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/deleteMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z" --upload-file dataset-delete-author-metadata.json For these deletes your JSON file must include an exact match of those dataset fields which you would like to delete. A sample JSON file may be downloaded here: :download:`dataset-delete-author-metadata.json <../_static/api/dataset-delete-author-metadata.json>` @@ -1246,13 +1246,13 @@ Deletes the draft version of dataset ``$ID``. Only the draft version can be dele export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/versions/:draft + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/versions/:draft" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/versions/:draft + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/versions/:draft" Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1267,13 +1267,13 @@ Note that the dataset citation date field type must be a date field. export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export DATASET_FIELD_TYPE_NAME=dateOfDeposit - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER --data "$DATASET_FIELD_TYPE_NAME" + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER" --data "$DATASET_FIELD_TYPE_NAME" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB --data "dateOfDeposit" + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB" --data "dateOfDeposit" Revert Citation Date Field Type to Default for Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1286,13 +1286,13 @@ Restores the default citation date field type, ``:publicationDate``, for a given export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB" .. _list-roles-on-a-dataset-api: @@ -1307,13 +1307,13 @@ Lists all role assignments on a given dataset: export SERVER_URL=https://demo.dataverse.org export ID=2347 - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/assignments + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/assignments" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/2347/assignments + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/2347/assignments" .. _assign-role-on-a-dataset-api: @@ -1328,13 +1328,13 @@ Assigns a new role, based on the POSTed JSON: export SERVER_URL=https://demo.dataverse.org export ID=2347 - curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" $SERVER_URL/api/datasets/$ID/assignments --upload-file role.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/datasets/$ID/assignments" --upload-file role.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-Type: application/json" https://demo.dataverse.org/api/datasets/2347/assignments --upload-file role.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/2347/assignments" --upload-file role.json POSTed JSON example (the content of ``role.json`` file):: @@ -1357,13 +1357,13 @@ Delete the assignment whose id is ``$id``: export ID=2347 export ASSIGNMENT_ID=6 - curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/datasets/$ID/assignments/$ASSIGNMENT_ID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/assignments/$ASSIGNMENT_ID" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/datasets/2347/assignments/6 + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/2347/assignments/6" Create a Private URL for a Dataset @@ -1377,20 +1377,20 @@ Create a Private URL (must be able to manage dataset permissions): export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/datasets/$ID/privateUrl + curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/privateUrl" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/privateUrl + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/privateUrl" If Anonymized Access has been enabled on a Dataverse installation (see the :ref:`:AnonymizedFieldTypeNames` setting), an optional 'anonymizedAccess' query parameter is allowed. Setting anonymizedAccess=true in your call will create a PrivateURL that only allows an anonymized view of the Dataset (see :ref:`privateurl`). .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/privateUrl?anonymizedAccess=true + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/privateUrl?anonymizedAccess=true" Get the Private URL for a Dataset @@ -1404,13 +1404,13 @@ Get a Private URL from a dataset (if available): export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/$ID/privateUrl + curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/privateUrl" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/datasets/24/privateUrl + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/privateUrl" Delete the Private URL from a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1423,13 +1423,13 @@ Delete a Private URL from a dataset (if it exists): export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/privateUrl + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/privateUrl" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/privateUrl + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/privateUrl" .. _add-file-api: @@ -1456,13 +1456,13 @@ In the curl example below, all of the above are specified but they are optional. 
export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB - curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F file=@data.tsv -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB" You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly uploaded file. @@ -1580,7 +1580,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB -F 'jsonData={"description":"A remote image.","storageIdentifier":"trsa://themes/custom/qdr/images/CoreTrustSeal-logo-transparent.png","checksumType":"MD5","md5Hash":"509ef88afa907eaf2c17c1c8d8fde77e","label":"testlogo.png","fileName":"testlogo.png","mimeType":"image/png"}' + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB" -F 'jsonData={"description":"A remote image.","storageIdentifier":"trsa://themes/custom/qdr/images/CoreTrustSeal-logo-transparent.png","checksumType":"MD5","md5Hash":"509ef88afa907eaf2c17c1c8d8fde77e","label":"testlogo.png","fileName":"testlogo.png","mimeType":"image/png"}' .. _cleanup-storage-api: @@ -1612,7 +1612,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash - curl -H X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X GET https://demo.dataverse.org/api/datasets/:persistentId/cleanStorage?persistentId=doi:10.5072/FK2/J8SJZB&dryrun=true + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/datasets/:persistentId/cleanStorage?persistentId=doi:10.5072/FK2/J8SJZB&dryrun=true" Adding Files To a Dataset via Other Tools ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1632,13 +1632,13 @@ Shows the combined size in bytes of all the files uploaded into the dataset ``id export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/storagesize + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/storagesize" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/24/storagesize + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/storagesize" The size of published and unpublished files will be summed in the dataset specified. By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by the Dataverse installation - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. Because this deals with unpublished files the token supplied must have permission to view unpublished drafts. @@ -1656,13 +1656,13 @@ Shows the combined size in bytes of all the files available for download from ve export ID=24 export VERSIONID=1.0 - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/versions/$VERSIONID/downloadsize + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/downloadsize" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize" The size of all files available for download will be returned. If :draft is passed as versionId the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. Also restricted files will be included in this total regardless of whether the user has access to download the restricted file(s). @@ -1731,13 +1731,13 @@ Creates a link between a dataset and a Dataverse collection (see :ref:`dataset-l export DATASET_ID=24 export DATAVERSE_ID=test - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ID + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ID" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/24/link/test + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/link/test" Dataset Locks ~~~~~~~~~~~~~ @@ -1752,13 +1752,13 @@ To check if a dataset is locked: export SERVER_URL=https://demo.dataverse.org export ID=24 - curl $SERVER_URL/api/datasets/$ID/locks + curl "$SERVER_URL/api/datasets/$ID/locks" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/datasets/24/locks + curl "https://demo.dataverse.org/api/datasets/24/locks" Optionally, you can check if there's a lock of a specific type on the dataset: @@ -1808,13 +1808,13 @@ The following API end point will lock a Dataset with a lock of specified type. N export ID=24 export LOCK_TYPE=Ingest - curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/datasets/$ID/lock/$LOCK_TYPE + curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/lock/$LOCK_TYPE" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/lock/Ingest + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/lock/Ingest" Use the following API to unlock the dataset, by deleting all the locks currently on the dataset. Note that this requires “superuser” credentials: @@ -1824,13 +1824,13 @@ Use the following API to unlock the dataset, by deleting all the locks currently export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/locks + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/locks" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/locks" Or, to delete a lock of the type specified only. Note that this requires “superuser” credentials: @@ -1841,13 +1841,13 @@ Or, to delete a lock of the type specified only. Note that this requires “supe export ID=24 export LOCK_TYPE=finalizePublication - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/locks?type=$LOCK_TYPE + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/locks?type=$LOCK_TYPE" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks?type=finalizePublication + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/locks?type=finalizePublication" If the dataset is not locked (or if there is no lock of the specified type), the API will exit with a warning message. 
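Because locks such as ``Ingest`` or ``finalizePublication`` are cleared asynchronously, scripts that chain API calls often need to wait for a dataset to unlock before proceeding. Below is a minimal polling sketch (not an official recipe); it assumes ``jq`` is available and that the locks endpoint returns the usual JSON envelope with the list of locks under ``data``:

.. code-block:: bash

    export SERVER_URL=https://demo.dataverse.org
    export ID=24

    # Poll every 5 seconds, up to 60 times, until no locks remain.
    for i in $(seq 1 60); do
      LOCKS=$(curl -s "$SERVER_URL/api/datasets/$ID/locks" | jq '.data | length')
      [ "$LOCKS" = "0" ] && break
      sleep 5
    done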
@@ -1996,13 +1996,13 @@ Delete the dataset whose id is passed: export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24 + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24" Delete Published Dataset ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2015,13 +2015,13 @@ Normally published datasets should not be deleted, but there exists a "destroy" export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/:persistentId/destroy/?persistentId=$PERSISTENT_ID + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/destroy/?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/:persistentId/destroy/?persistentId=doi:10.5072/FK2/AAA000 + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/:persistentId/destroy/?persistentId=doi:10.5072/FK2/AAA000" Delete with dataset identifier: @@ -2031,13 +2031,13 @@ Delete with dataset identifier: export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/destroy + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/destroy" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/destroy + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/destroy" Calling the destroy endpoint is permanent and irreversible. It will remove the dataset and its datafiles, then re-index the parent Dataverse collection in Solr. This endpoint requires the API token of a superuser. @@ -2246,13 +2246,13 @@ Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB*: export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB" You may get the draft version of an unpublished file if you pass an API token with view draft permissions: @@ -2262,13 +2262,13 @@ You may get its draft version of an unpublished file if you pass an api token wi export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB" |CORS| Show the file whose id is passed: @@ -2278,13 +2278,13 @@ The fully expanded example above (without environment variables) looks like this export SERVER_URL=https://demo.dataverse.org export ID=408730 - curl $SERVER_URL/api/file/$ID + curl "$SERVER_URL/api/files/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/files/408730 + curl "https://demo.dataverse.org/api/files/408730" You may get the draft version of a published file if you pass an API token with view draft permissions and use the draft path parameter: @@ -2294,13 +2294,13 @@ You may get its draft version of an published file if you pass an api token with export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER/api/files/:persistentId/draft/?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/draft/?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/:persistentId/draft/?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/draft/?persistentId=doi:10.5072/FK2/J8SJZB" The file id can be extracted from the response retrieved from the API which uses the persistent identifier (``/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER``). @@ -2351,13 +2351,13 @@ A curl example using an ``id`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true $SERVER_URL/api/files/$ID/restrict + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true "$SERVER_URL/api/files/$ID/restrict" The fully expanded example above (without environment variables) looks like this: ..
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true https://demo.dataverse.org/api/files/24/restrict + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true "https://demo.dataverse.org/api/files/24/restrict" A curl example using a ``pid`` @@ -2367,7 +2367,7 @@ A curl example using a ``pid`` export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true $SERVER_URL/api/files/:persistentId/restrict?persistentId=$PERSISTENT_ID + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true "$SERVER_URL/api/files/:persistentId/restrict?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -2388,13 +2388,13 @@ A curl example using an ``ID``: export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/uningest + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/uningest" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/uningest + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/uningest" A curl example using a ``PERSISTENT_ID``: @@ -2427,13 +2427,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/reingest + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/reingest" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/reingest + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/reingest" A curl example using a ``PERSISTENT_ID`` @@ -2443,7 +2443,7 @@ A curl example using a ``PERSISTENT_ID`` export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/:persistentId/reingest?persistentId=$PERSISTENT_ID + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/reingest?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -2524,7 +2524,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/extractNcml + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/extractNcml" A curl example using a PID: @@ -2557,7 +2557,7 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' $SERVER_URL/api/files/$ID/replace + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' "$SERVER_URL/api/files/$ID/replace" The fully expanded example above (without environment variables) looks like this: @@ -2565,7 +2565,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \ -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \ - https://demo.dataverse.org/api/files/24/replace + "https://demo.dataverse.org/api/files/24/replace" A curl example using a ``PERSISTENT_ID`` @@ -2605,13 +2605,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/files/$ID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/files/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/files/24 + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/files/24" A curl example using a ``PERSISTENT_ID`` @@ -2642,13 +2642,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl $SERVER_URL/api/files/$ID/metadata + curl "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/files/24/metadata + curl "https://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -2676,13 +2676,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/metadata/draft + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/metadata/draft" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/metadata/draft + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/24/metadata/draft" A curl example using a ``PERSISTENT_ID`` @@ -2718,7 +2718,7 @@ A curl example using an ``ID`` curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ - $SERVER_URL/api/files/$ID/metadata + "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -2726,7 +2726,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ - http://demo.dataverse.org/api/files/24/metadata + "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -2766,13 +2766,13 @@ A curl example using an ``ID`` export ID=24 export FILE=dct.xml - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT $SERVER_URL/api/edit/$ID --upload-file $FILE + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/edit/$ID" --upload-file $FILE The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/edit/24 --upload-file dct.xml + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/edit/24" --upload-file dct.xml You can download :download:`dct.xml <../../../../src/test/resources/xml/dct.xml>` from the example above to see what the XML looks like. @@ -2790,13 +2790,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/prov-json + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/prov-json" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/prov-json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/24/prov-json" A curl example using a ``PERSISTENT_ID`` @@ -2825,13 +2825,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/prov-freeform + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/prov-freeform" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/prov-freeform + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/24/prov-freeform" A curl example using a ``PERSISTENT_ID`` @@ -2862,7 +2862,7 @@ A curl example using an ``ID`` export ENTITY_NAME="..." 
export FILE_PATH=provenance.json - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/prov-json?entityName=$ENTITY_NAME -H "Content-type:application/json" --upload-file $FILE_PATH + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/prov-json?entityName=$ENTITY_NAME" -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: @@ -2902,13 +2902,13 @@ A curl example using an ``ID`` export ID=24 export FILE_PATH=provenance.json - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/prov-freeform -H "Content-type:application/json" --upload-file $FILE_PATH + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/prov-freeform" -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/prov-freeform -H "Content-type:application/json" --upload-file provenance.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/prov-freeform" -H "Content-type:application/json" --upload-file provenance.json A curl example using a ``PERSISTENT_ID`` @@ -2940,13 +2940,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/files/$ID/prov-json + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/files/$ID/prov-json" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/files/24/prov-json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/files/24/prov-json" A curl example using a ``PERSISTENT_ID`` @@ -2975,7 +2975,7 @@ Starting with the release 4.10 the size of the saved original file (for an inges export SERVER_URL=https://localhost - curl $SERVER_URL/api/admin/datafiles/integrity/fixmissingoriginalsizes + curl "$SERVER_URL/api/admin/datafiles/integrity/fixmissingoriginalsizes" with limit parameter: @@ -2990,13 +2990,13 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes" + curl "https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes" with limit parameter: .. code-block:: bash - curl https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes?limit=10" + curl "https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes?limit=10" Note the optional "limit" parameter. Without it, the API will attempt to populate the sizes for all the saved originals that don't have them in the database yet. Otherwise it will do so for the first N such datafiles. @@ -3018,7 +3018,7 @@ The response is a JSON object described in the :doc:`/api/external-tools` sectio export FILEMETADATA_ID=1 export TOOL_ID=1 - curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/files/$FILE_ID/metadata/$FILEMETADATA_ID/toolparams/$TOOL_ID + curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/files/$FILE_ID/metadata/$FILEMETADATA_ID/toolparams/$TOOL_ID" .. 
_get-fixity-algorithm: @@ -3033,7 +3033,7 @@ This algorithm will be used when the Dataverse software manages a file upload an export SERVER_URL=https://demo.dataverse.org - curl "$SERVER_URL/api/files/fixityAlgorithm + curl "$SERVER_URL/api/files/fixityAlgorithm" Users Token Management @@ -3046,21 +3046,21 @@ Find a Token's Expiration Date In order to obtain the expiration date of a token use:: - curl -H X-Dataverse-key:$API_TOKEN -X GET $SERVER_URL/api/users/token + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/users/token" Recreate a Token ~~~~~~~~~~~~~~~~ In order to obtain a new token use:: - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/users/token/recreate + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/users/token/recreate" Delete a Token ~~~~~~~~~~~~~~ In order to delete a token use:: - curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/users/token + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/users/token" @@ -3130,13 +3130,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/roles/$ID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/24 + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/24" A curl example using a Role alias ``ALIAS`` @@ -3152,7 +3152,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/:alias?alias=roleAlias + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=roleAlias" Explicit Groups @@ -3245,13 +3245,13 @@ Show Dataverse Software Version and Build Number export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/info/version + curl "$SERVER_URL/api/info/version" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/info/version + curl "https://demo.dataverse.org/api/info/version" Show Dataverse Installation Server Name ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3264,13 +3264,13 @@ Get the server name. This is useful when a Dataverse installation is composed of export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/info/server + curl "$SERVER_URL/api/info/server" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/info/server + curl "https://demo.dataverse.org/api/info/server" Show Custom Popup Text for Publishing Datasets ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3283,13 +3283,13 @@ For now, only the value for the :ref:`:DatasetPublishPopupCustomText` setting fr export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/info/settings/:DatasetPublishPopupCustomText + curl "$SERVER_URL/api/info/settings/:DatasetPublishPopupCustomText" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl https://demo.dataverse.org/api/info/settings/:DatasetPublishPopupCustomText + curl "https://demo.dataverse.org/api/info/settings/:DatasetPublishPopupCustomText" Get API Terms of Use URL ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3302,13 +3302,13 @@ Get API Terms of Use. The response contains the text value inserted as API Terms export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/info/apiTermsOfUse + curl "$SERVER_URL/api/info/apiTermsOfUse" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/info/apiTermsOfUse + curl "https://demo.dataverse.org/api/info/apiTermsOfUse" .. _info-incomplete-metadata: @@ -3322,13 +3322,13 @@ See also :ref:`create-dataset-command` and :ref:`dataverse.api.allow-incomplete- export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/info/settings/incompleteMetadataViaApi + curl "$SERVER_URL/api/info/settings/incompleteMetadataViaApi" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/info/settings/incompleteMetadataViaApi + curl "https://demo.dataverse.org/api/info/settings/incompleteMetadataViaApi" .. _metadata-blocks-api: @@ -3347,13 +3347,13 @@ Show Info About All Metadata Blocks export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/metadatablocks + curl "$SERVER_URL/api/metadatablocks" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/metadatablocks + curl "https://demo.dataverse.org/api/metadatablocks" Show Info About Single Metadata Block ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3365,13 +3365,13 @@ Show Info About Single Metadata Block export SERVER_URL=https://demo.dataverse.org export IDENTIFIER=citation - curl $SERVER_URL/api/metadatablocks/$IDENTIFIER + curl "$SERVER_URL/api/metadatablocks/$IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl https://demo.dataverse.org/api/metadatablocks/citation + curl "https://demo.dataverse.org/api/metadatablocks/citation" .. _Notifications: @@ -3387,7 +3387,7 @@ Each user can get a dump of their notifications by passing in their API token: .. code-block:: bash - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/notifications/all + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/notifications/all" Delete Notification by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3398,7 +3398,7 @@ Each user can delete notifications by passing in their API token and specifying export NOTIFICATION_ID=555 - curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/notifications/$NOTIFICATION_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/notifications/$NOTIFICATION_ID" Get All Muted In-app Notifications by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3407,7 +3407,7 @@ Each user can get a list of their muted in-app notification types by passing in .. 
code-block:: bash - curl -H X-Dataverse-key:$API_TOKEN -X GET "$SERVER_URL/api/notifications/mutedNotifications" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/notifications/mutedNotifications" Mute In-app Notification by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3418,7 +3418,7 @@ Each user can mute in-app notifications by passing in their API token and specif export NOTIFICATION_TYPE=ASSIGNROLE - curl -H X-Dataverse-key:$API_TOKEN -X PUT "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE" + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE" Unmute In-app Notification by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3429,7 +3429,7 @@ Each user can unmute in-app notifications by passing in their API token and spec export NOTIFICATION_TYPE=ASSIGNROLE - curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE" + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE" Get All Muted Email Notifications by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3438,7 +3438,7 @@ Each user can get a list of their muted email notification types by passing in t .. code-block:: bash - curl -H X-Dataverse-key:$API_TOKEN -X GET "$SERVER_URL/api/notifications/mutedEmails" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/notifications/mutedEmails" Mute Email Notification by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3449,7 +3449,7 @@ Each user can mute email notifications by passing in their API token and specify export NOTIFICATION_TYPE=ASSIGNROLE - curl -H X-Dataverse-key:$API_TOKEN -X PUT "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE" + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE" Unmute Email Notification by User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3460,7 +3460,7 @@ Each user can unmute email notifications by passing in their API token and speci export NOTIFICATION_TYPE=ASSIGNROLE - curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE" + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE" .. 
_User Information: @@ -3472,7 +3472,7 @@ Get User Information in JSON Format Each user can get a dump of their basic information in JSON format by passing in their API token:: - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/users/:me + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/users/:me" @@ -3519,7 +3519,7 @@ An example JSON file would look like this:: export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org - curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/harvest/server/oaisets/add" --upload-file harvestset-finch.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/harvest/server/oaisets/add" --upload-file harvestset-finch.json The fully expanded example above (without the environment variables) looks like this: @@ -3554,7 +3554,7 @@ An example JSON file would look like this:: export SERVER_URL=https://demo.dataverse.org export SPECNAME=ffAuthor - curl -H X-Dataverse-key:$API_TOKEN -X PUT "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME" --upload-file modify-harvestset-finch.json + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME" --upload-file modify-harvestset-finch.json The fully expanded example above (without the environment variables) looks like this: @@ -3575,7 +3575,7 @@ To delete a harvesting set, use the set's database name. For example, to delete export SERVER_URL=https://demo.dataverse.org export SPECNAME=ffAuthor - curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME" + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME" The fully expanded example above (without the environment variables) looks like this: @@ -3685,7 +3685,7 @@ Something important to keep in mind about this API is that, unlike the harvestin export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=http://localhost:8080 - curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/zenodo" --upload-file client.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/zenodo" --upload-file client.json The fully expanded example above (without the environment variables) looks like this: @@ -3755,13 +3755,13 @@ Get information on a PID, especially its "state" such as "draft" or "findable". export SERVER_URL=https://demo.dataverse.org export PID=doi:10.70122/FK2/9BXT5O - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/pids?persistentId=$PID + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/pids?persistentId=$PID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/pids?persistentId=doi:10.70122/FK2/9BXT5O + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/pids?persistentId=doi:10.70122/FK2/9BXT5O" List Unreserved PIDs ~~~~~~~~~~~~~~~~~~~~ @@ -3775,14 +3775,14 @@ Get a list of PIDs that have not been reserved on the PID provider side. This ca export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/pids/unreserved + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/pids/unreserved" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/pids/unreserved + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/pids/unreserved" Reserve a PID ~~~~~~~~~~~~~ @@ -3797,13 +3797,13 @@ Reserved a PID for a dataset. A superuser API token is required. export SERVER_URL=https://demo.dataverse.org export PID=doi:10.70122/FK2/9BXT5O - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/pids/:persistentId/reserve?persistentId=$PID + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/pids/:persistentId/reserve?persistentId=$PID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/pids/:persistentId/reserve?persistentId=doi:10.70122/FK2/9BXT5O + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/pids/:persistentId/reserve?persistentId=doi:10.70122/FK2/9BXT5O" Delete a PID ~~~~~~~~~~~~ @@ -3818,13 +3818,13 @@ Delete PID (this is only possible for PIDs that are in the "draft" state) and wi export SERVER_URL=https://demo.dataverse.org export PID=doi:10.70122/FK2/9BXT5O - curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/pids/:persistentId/delete?persistentId=$PID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/pids/:persistentId/delete?persistentId=$PID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/pids/:persistentId/delete?persistentId=doi:10.70122/FK2/9BXT5O + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/pids/:persistentId/delete?persistentId=doi:10.70122/FK2/9BXT5O" .. _admin: @@ -3874,7 +3874,7 @@ Note that HTML can be included in banner messages. Add a Banner Message:: - curl -H "Content-type:application/json" -X POST http://$SERVER/api/admin/bannerMessage --upload-file messages.json + curl -H "Content-type:application/json" -X POST "http://$SERVER/api/admin/bannerMessage" --upload-file messages.json Where ``messages.json`` looks like this:: @@ -3894,15 +3894,15 @@ Where ``messages.json`` looks like this:: Get a list of active Banner Messages:: - curl -X GET http://$SERVER/api/admin/bannerMessage + curl -X GET "http://$SERVER/api/admin/bannerMessage" Delete a Banner Message by its id:: - curl -X DELETE http://$SERVER/api/admin/bannerMessage/$id + curl -X DELETE "http://$SERVER/api/admin/bannerMessage/$id" Deactivate a Banner Message by its id (allows you to hide a message while retaining information about which users have dismissed the banner):: - curl -X PUT http://$SERVER/api/admin/bannerMessage/$id/deactivate + curl -X PUT "http://$SERVER/api/admin/bannerMessage/$id/deactivate" List Authentication Provider Factories ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3955,7 +3955,7 @@ Check whether an authentication proider is enabled:: The body of the request should be either ``true`` or ``false``. 
Content type has to be ``application/json``, like so:: - curl -H "Content-type: application/json" -X POST -d"false" http://localhost:8080/api/admin/authenticationProviders/echo-dignified/:enabled + curl -H "Content-type: application/json" -X POST -d"false" "http://localhost:8080/api/admin/authenticationProviders/echo-dignified/:enabled" Delete an Authentication Provider ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3986,10 +3986,10 @@ Creates a global role in the Dataverse installation. The data POSTed are assumed export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/admin/roles --upload-file roles.json + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json ``roles.json`` see :ref:`json-representation-of-a-role` - + Delete Global Role ~~~~~~~~~~~~~~~~~~ @@ -4001,13 +4001,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/admin/roles/$ID + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/24 + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/admin/roles/24" A curl example using a Role alias ``ALIAS`` @@ -4023,7 +4023,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/:alias?alias=roleAlias + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/admin/roles/:alias?alias=roleAlias" List Users ~~~~~~~~~~ @@ -4041,7 +4041,7 @@ List users with the options to search and "page" through results. Only accessibl export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/admin/list-users + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/list-users" # sort by createdtime (the creation time of the account) curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/list-users?sortKey=createdtime" @@ -4050,7 +4050,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/admin/list-users + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/admin/list-users" # sort by createdtime (the creation time of the account) curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/admin/list-users?sortKey=createdtime" @@ -4201,7 +4201,7 @@ If a user has created multiple accounts and has been performed actions under bot POST https://$SERVER/api/users/$toMergeIdentifier/mergeIntoUser/$continuingIdentifier -Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST http://demo.dataverse.org/api/users/jsmith2/mergeIntoUser/jsmith`` +Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST "http://demo.dataverse.org/api/users/jsmith2/mergeIntoUser/jsmith"`` This action moves account data from jsmith2 into the account jsmith and deletes the account of jsmith2. 
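As an aside on why quotes are being added around these URLs: without them, the shell treats an unquoted ``&`` as a command separator (it backgrounds everything before it and tries to run what follows as a command of its own), and an unquoted ``?`` may be expanded as a filename wildcard, so only part of the query string ever reaches the server. A quick illustration, reusing the ``list-users`` endpoint above (the ``searchTerm`` parameter is shown purely for illustration):

.. code-block:: bash

    # Broken: the shell backgrounds curl at "&" and then tries to run
    # "sortKey=createdtime" as a separate command.
    curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/admin/list-users?searchTerm=jdoe&sortKey=createdtime

    # Correct: quoting delivers the full query string to the server.
    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/list-users?searchTerm=jdoe&sortKey=createdtime"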
@@ -4216,7 +4216,7 @@ Changes identifier for user in ``AuthenticatedUser``, ``BuiltinUser``, ``Authent POST http://$SERVER/api/users/$oldIdentifier/changeIdentifier/$newIdentifier -Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST https://demo.dataverse.org/api/users/johnsmith/changeIdentifier/jsmith`` +Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST "https://demo.dataverse.org/api/users/johnsmith/changeIdentifier/jsmith"`` This action changes the identifier of user johnsmith to jsmith. @@ -4256,13 +4256,13 @@ Deactivates a user. A superuser API token is not required but the command will o export SERVER_URL=http://localhost:8080 export USERNAME=jdoe - curl -X POST $SERVER_URL/api/admin/authenticatedUsers/$USERNAME/deactivate + curl -X POST "$SERVER_URL/api/admin/authenticatedUsers/$USERNAME/deactivate" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -X POST http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate + curl -X POST "http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate" The database ID of the user can be passed instead of the username. @@ -4271,7 +4271,7 @@ The database ID of the user can be passed instead of the username. export SERVER_URL=http://localhost:8080 export USERID=42 - curl -X POST $SERVER_URL/api/admin/authenticatedUsers/id/$USERID/deactivate + curl -X POST "$SERVER_URL/api/admin/authenticatedUsers/id/$USERID/deactivate" Note: A primary purpose of most Dataverse installations is to serve an archive. In the archival space, there are best practices around the tracking of data access and the tracking of modifications to data and metadata. In support of these key workflows, a simple mechanism to delete users that have performed edit or access actions in the system is not provided. Providing a Deactivate User endpoint for users who have taken certain actions in the system alongside a Delete User endpoint to remove users that haven't taken certain actions in the system is by design. @@ -4310,13 +4310,13 @@ Show the traces that the user has left in the system, such as datasets created, export SERVER_URL=https://demo.dataverse.org export USERNAME=jdoe - curl -H "X-Dataverse-key:$API_TOKEN" -X GET $SERVER_URL/api/users/$USERNAME/traces + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/users/$USERNAME/traces" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X GET https://demo.dataverse.org/api/users/jdoe/traces + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/users/jdoe/traces" Remove All Roles from a User ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -4331,13 +4331,13 @@ Removes all roles from the user. This is equivalent of clicking the "Remove All export SERVER_URL=https://demo.dataverse.org export USERNAME=jdoe - curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/users/$USERNAME/removeRoles + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/users/$USERNAME/removeRoles" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST http://localhost:8080/api/users/jdoe/removeRoles + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/users/jdoe/removeRoles" List Role Assignments of a Role Assignee ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -4401,11 +4401,11 @@ Datafile Integrity Recalculate the checksum value of a datafile, by supplying the file's database id and an algorithm (Valid values for $ALGORITHM include MD5, SHA-1, SHA-256, and SHA-512):: - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/admin/computeDataFileHashValue/{fileId}/algorithm/$ALGORITHM + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/computeDataFileHashValue/{fileId}/algorithm/$ALGORITHM" Validate an existing checksum value against one newly calculated from the saved file:: - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/admin/validateDataFileHashValue/{fileId} + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/validateDataFileHashValue/{fileId}" .. _dataset-files-validation-api: @@ -4418,7 +4418,7 @@ The following validates all the physical files in the dataset specified, by reca It will report the specific files that have failed the validation. For example:: - curl http://localhost:8080/api/admin/validate/dataset/files/:persistentId/?persistentId=doi:10.5072/FK2/XXXXX + curl "http://localhost:8080/api/admin/validate/dataset/files/:persistentId/?persistentId=doi:10.5072/FK2/XXXXX" {"dataFiles": [ {"datafileId":2658,"storageIdentifier":"file://123-aaa","status":"valid"}, {"datafileId":2659,"storageIdentifier":"file://123-bbb","status":"invalid","errorMessage":"Checksum mismatch for datafile id 2669"}, @@ -4444,8 +4444,8 @@ Allowed values are MD5, SHA-1, SHA-256, and SHA-512 export ALG=SHA-256 export BATCHSIZE=1 - curl http://localhost:8080/api/admin/updateHashValues/$ALG - curl http://localhost:8080/api/admin/updateHashValues/$ALG?num=$BATCHSIZE + curl "http://localhost:8080/api/admin/updateHashValues/$ALG" + curl "http://localhost:8080/api/admin/updateHashValues/$ALG?num=$BATCHSIZE" .. _dataset-validation-api: @@ -4455,7 +4455,7 @@ Dataset Validation Validate the dataset and its components (DatasetVersion, FileMetadatas, etc.) for constraint violations:: - curl $SERVER_URL/api/admin/validate/dataset/{datasetId} + curl "$SERVER_URL/api/admin/validate/dataset/{datasetId}" If validation fails, it will report the specific database entity and the offending value. For example:: @@ -4465,7 +4465,7 @@ If the optional argument ``variables=true`` is specified, the API will also vali Validate all the datasets in the Dataverse installation, and report any constraint violations found:: - curl $SERVER_URL/api/admin/validate/datasets + curl "$SERVER_URL/api/admin/validate/datasets" If the optional argument ``variables=true`` is specified, the API will also validate the metadata associated with any tabular data files. (For example: an invalid or empty variable name). Note that validating all the tabular metadata may significantly increase the run time of the full validation pass. @@ -4574,14 +4574,14 @@ View the list of standard license terms that can be selected for a dataset: .. code-block:: bash export SERVER_URL=https://demo.dataverse.org - curl $SERVER_URL/api/licenses + curl "$SERVER_URL/api/licenses" View the details of the standard license with the database ID specified in ``$ID``: ..
code-block:: bash export ID=1 - curl $SERVER_URL/api/licenses/$ID + curl "$SERVER_URL/api/licenses/$ID" Superusers can add a new license by posting a JSON file adapted from this example :download:`add-license.json <../_static/api/add-license.json>`. The ``name`` and ``uri`` of the new license must be unique. The sort order field is mandatory. If you are interested in adding a Creative Commons license, you are encouraged to use the JSON files under :ref:`adding-creative-commons-licenses`: @@ -4589,33 +4589,33 @@ Superusers can add a new license by posting a JSON file adapted from this exampl .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - curl -X POST -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN --data-binary @add-license.json $SERVER_URL/api/licenses + curl -X POST -H 'Content-Type: application/json' -H "X-Dataverse-key:$API_TOKEN" --data-binary @add-license.json "$SERVER_URL/api/licenses" Superusers can change whether an existing license is active (usable for new dataset versions) or inactive (only allowed on already-published versions), specified by the license ``$ID``: .. code-block:: bash export STATE=true - curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:active/$STATE + curl -X PUT -H 'Content-Type: application/json' -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/$ID/:active/$STATE" Superusers may change the default license by specifying the license ``$ID``: .. code-block:: bash - curl -X PUT -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/default/$ID + curl -X PUT -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/default/$ID" Superusers can delete a license, provided it is not in use, by the license ``$ID``: .. code-block:: bash - curl -X DELETE -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID + curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/$ID" Superusers can change the sorting order of a license specified by the license ``$ID``: .. code-block:: bash export SORT_ORDER=100 - curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:sortOrder/$SORT_ORDER + curl -X PUT -H 'Content-Type: application/json' -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/$ID/:sortOrder/$SORT_ORDER" List Dataset Templates ~~~~~~~~~~~~~~~~~~~~~~ @@ -4639,13 +4639,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl -X DELETE $SERVER_URL/api/admin/template/$ID + curl -X DELETE "$SERVER_URL/api/admin/template/$ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -X DELETE https://demo.dataverse.org/api/admin/template/24 + curl -X DELETE "https://demo.dataverse.org/api/admin/template/24" .. _api-native-signed-url: @@ -4672,7 +4672,7 @@ A curl example using allowing access to a dataset's metadata export API_KEY=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export JSON='{"url":"https://demo.dataverse.org/api/v1/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB","timeOut":5,"user":"alberteinstein"}' - curl -H "X-Dataverse-key:$API_KEY" -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/requestSignedUrl + curl -H "X-Dataverse-key:$API_KEY" -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/admin/requestSignedUrl" Please see :ref:`dataverse.api.signature-secret` for the configuration option to add a shared secret, enabling extra security.
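As a sketch of how the signed URL might be consumed afterwards: assuming ``jq`` is available and that the response carries the URL under ``data.signedUrl`` (verify the response shape on your release), the URL can then be called directly, with no API token, until it times out:

.. code-block:: bash

    SIGNED_URL=$(curl -s -H "X-Dataverse-key:$API_KEY" -H 'Content-Type:application/json' \
      -d "$JSON" "$SERVER_URL/api/admin/requestSignedUrl" | jq -r '.data.signedUrl')

    # The signature travels in the query string, so no key header is needed.
    curl "$SIGNED_URL"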
@@ -4700,7 +4700,7 @@ A curl example using an ``ID`` export SERVER_URL=http://localhost export JSON='{"targetId":24, "subject":"Data Question", "body":"Please help me understand your data. Thank you!", "fromEmail":"dataverseSupport@mailinator.com"}' - curl -X POST -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/feedback + curl -X POST -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/admin/feedback" Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`. diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 736d86cacf5..2c2ddf1bdf6 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -66,9 +66,9 @@ # built documents. # # The short X.Y version. -version = '5.13' +version = '5.14' # The full version, including alpha/beta/rc tags. -release = '5.13' +release = '5.14' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst index ed06314ef0e..1a47a8fc413 100644 --- a/doc/sphinx-guides/source/container/base-image.rst +++ b/doc/sphinx-guides/source/container/base-image.rst @@ -41,7 +41,7 @@ Image Contents The base image provides: -- `Eclipse Temurin JRE using Java 11 `_ +- `Eclipse Temurin JRE using Java 17 `_ - `Payara Community Application Server `_ - CLI tools necessary to run Dataverse (i. e. ``curl`` or ``jq`` - see also :doc:`../installation/prerequisites` in Installation Guide) - Linux tools for analysis, monitoring and so on diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 3fbe55766d5..04c7eba7913 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -9,17 +9,7 @@ Please note! This Docker setup is not for production! Quickstart ---------- -First, install Java 11 and Maven. - -After cloning the repo, try this: - -``mvn -Pct clean package docker:run`` - -After some time you should be able to log in: - -- url: http://localhost:8080 -- username: dataverseAdmin -- password: admin1 +See :ref:`container-dev-quickstart`. Intro ----- @@ -172,7 +162,7 @@ restart the application container: Using ``docker container inspect dev_dataverse | grep Image`` you can verify the changed checksums. -Using A Debugger +Using a Debugger ---------------- The :doc:`base-image` enables usage of the `Java Debugging Wire Protocol `_ @@ -183,3 +173,8 @@ There are a lot of tutorials how to connect your IDE's debugger to a remote endp as the endpoint. Here are links to the most common IDEs docs on remote debugging: `Eclipse `_, `IntelliJ `_ + +Building Your Own Base Image +---------------------------- + +If you find yourself tasked with upgrading Payara, you will need to create your own base image before running the :ref:`container-dev-quickstart`. For instructions, see :doc:`base-image`. diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index b238a7623eb..04885571a01 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -173,6 +173,8 @@ See also :ref:`Globus settings <:GlobusBasicToken>`. 
Data Capture Module (DCM) ------------------------- +Please note: The DCM feature is deprecated. + Data Capture Module (DCM) is an experimental component that allows users to upload large datasets via rsync over ssh. DCM was developed and tested using Glassfish but these docs have been updated with references to Payara. @@ -209,7 +211,7 @@ The JSON that a DCM sends to your Dataverse installation on successful checksum :language: json - ``status`` - The valid strings to send are ``validation passed`` and ``validation failed``. -- ``uploadFolder`` - This is the directory on disk where your Dataverse installation should attempt to find the files that a DCM has moved into place. There should always be a ``files.sha`` file and a least one data file. ``files.sha`` is a manifest of all the data files and their checksums. The ``uploadFolder`` directory is inside the directory where data is stored for the dataset and may have the same name as the "identifier" of the persistent id (DOI or Handle). For example, you would send ``"uploadFolder": "DNXV2H"`` in the JSON file when the absolute path to this directory is ``/usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/DNXV2H/DNXV2H``. +- ``uploadFolder`` - This is the directory on disk where your Dataverse installation should attempt to find the files that a DCM has moved into place. There should always be a ``files.sha`` file and at least one data file. ``files.sha`` is a manifest of all the data files and their checksums. The ``uploadFolder`` directory is inside the directory where data is stored for the dataset and may have the same name as the "identifier" of the persistent id (DOI or Handle). For example, you would send ``"uploadFolder": "DNXV2H"`` in the JSON file when the absolute path to this directory is ``/usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/DNXV2H/DNXV2H``. - ``totalSize`` - Your Dataverse installation will use this value to represent the total size in bytes of all the files in the "package" that's created. If 360 data files and one ``files.sha`` manifest file are in the ``uploadFolder``, this value is the sum of the 360 data files. @@ -231,9 +233,9 @@ Add Dataverse Installation settings to use mock (same as using DCM, noted above) At this point you should be able to download a placeholder rsync script. Your Dataverse installation is then waiting for news from the DCM about whether checksum validation has succeeded or not. First, you have to put files in place, which is usually the job of the DCM. You should substitute "X1METO" for the "identifier" of the dataset you create. You must also use the proper path for where you store files in your dev environment.
-- ``mkdir /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO`` -- ``mkdir /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO`` -- ``cd /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO`` +- ``mkdir /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO`` +- ``mkdir /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO`` +- ``cd /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO`` - ``echo "hello" > file1.txt`` - ``shasum file1.txt > files.sha`` @@ -248,104 +250,11 @@ The following low level command should only be used when troubleshooting the "im ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$DV_BASE_URL/api/batch/jobs/import/datasets/files/$DATASET_DB_ID?uploadFolder=$UPLOAD_FOLDER&totalSize=$TOTAL_SIZE"`` -Steps to set up a DCM via Docker for Development -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If you need a fully operating DCM client for development purposes, these steps will guide you to setting one up. This includes steps to set up the DCM on S3 variant. - -Docker Image Set-up -^^^^^^^^^^^^^^^^^^^ - -See https://github.com/IQSS/dataverse/blob/develop/conf/docker-dcm/readme.md - -- Install docker if you do not have it - -Optional steps for setting up the S3 Docker DCM Variant -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- Before: the default bucket for DCM to hold files in S3 is named test-dcm. It is coded into `post_upload_s3.bash` (line 30). Change to a different bucket if needed. -- Also Note: With the new support for multiple file store in the Dataverse Software, DCM requires a store with id="s3" and DCM will only work with this store. - - - Add AWS bucket info to dcmsrv - - Add AWS credentials to ``~/.aws/credentials`` - - - ``[default]`` - - ``aws_access_key_id =`` - - ``aws_secret_access_key =`` - -- Dataverse installation configuration (on dvsrv): - - - Set S3 as the storage driver - - - ``cd /opt/payara5/bin/`` - - ``./asadmin delete-jvm-options "\-Ddataverse.files.storage-driver-id=file"`` - - ``./asadmin create-jvm-options "\-Ddataverse.files.storage-driver-id=s3"`` - - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.type=s3"`` - - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.label=s3"`` - - - - Add AWS bucket info to your Dataverse installation - - Add AWS credentials to ``~/.aws/credentials`` - - - ``[default]`` - - ``aws_access_key_id =`` - - ``aws_secret_access_key =`` - - - Also: set region in ``~/.aws/config`` to create a region file. Add these contents: - - - ``[default]`` - - ``region = us-east-1`` - - - Add the S3 bucket names to your Dataverse installation - - - S3 bucket for your Dataverse installation - - - ``/usr/local/payara5/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.s3.bucket-name=iqsstestdcmbucket"`` - - - S3 bucket for DCM (as your Dataverse installation needs to do the copy over) - - - ``/usr/local/payara5/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.dcm-s3-bucket-name=test-dcm"`` - - - Set download method to be HTTP, as DCM downloads through S3 are over this protocol ``curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http"`` - -Using the DCM Docker Containers -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -For using these commands, you will need to connect to the shell prompt inside various containers (e.g. 
``docker exec -it dvsrv /bin/bash``) - -- Create a dataset and download rsync upload script - - - connect to client container: ``docker exec -it dcm_client bash`` - - create dataset: ``cd /mnt ; ./create.bash`` ; this will echo the database ID to stdout - - download transfer script: ``./get_transfer.bash $database_id_from_create_script`` - - execute the transfer script: ``bash ./upload-${database_id_from-create_script}.bash`` , and follow instructions from script. - -- Run script - - - e.g. ``bash ./upload-3.bash`` (``3`` being the database id from earlier commands in this example). - -- Manually run post upload script on dcmsrv - - - for posix implementation: ``docker exec -it dcmsrv /opt/dcm/scn/post_upload.bash`` - - for S3 implementation: ``docker exec -it dcmsrv /opt/dcm/scn/post_upload_s3.bash`` - -Additional DCM docker development tips -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- You can completely blow away all the docker images with these commands (including non DCM ones!) - - ``docker-compose -f docmer-compose.yml down -v`` - -- There are a few logs to tail - - - dvsrv : ``tail -n 2000 -f /opt/payara5/glassfish/domains/domain1/logs/server.log`` - - dcmsrv : ``tail -n 2000 -f /var/log/lighttpd/breakage.log`` - - dcmsrv : ``tail -n 2000 -f /var/log/lighttpd/access.log`` - -- You may have to restart the app server domain occasionally to deal with memory filling up. If deployment is getting reallllllly slow, its a good time. - Repository Storage Abstraction Layer (RSAL) ------------------------------------------- +Please note: The RSAL feature is deprecated. + Steps to set up a DCM via Docker for Development ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst new file mode 100755 index 00000000000..062a1bb36f3 --- /dev/null +++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst @@ -0,0 +1,266 @@ +======================= +Classic Dev Environment +======================= + +These are the old instructions we used for Dataverse 4 and 5. They should still work but these days we favor running Dataverse in Docker as described in :doc:`dev-environment`. + +These instructions are purposefully opinionated and terse to help you get your development environment up and running as quickly as possible! Please note that familiarity with running commands from the terminal is assumed. + +.. contents:: |toctitle| + :local: + +Quick Start (Docker) +-------------------- + +The quickest way to get Dataverse running is in Docker as explained in :doc:`../container/dev-usage` section of the Container Guide. + + +Classic Dev Environment +----------------------- + +Since before Docker existed, we have encouraged installing Dataverse and all its dependencies directly on your development machine, as described below. This can be thought of as the "classic" development environment for Dataverse. + +However, in 2023 we decided that we'd like to encourage all developers to start using Docker instead and opened https://github.com/IQSS/dataverse/issues/9616 to indicate that we plan to rewrite this page to recommend the use of Docker. + +There's nothing wrong with the classic instructions below and we don't plan to simply delete them. They are a valid alternative to running Dataverse in Docker. We will likely move them to another page. 
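+
+For reference, that Docker quickstart boils down to a single command (see :doc:`../container/dev-usage` in the Container Guide for details):
+
+``mvn -Pct clean package docker:run``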
+ +Set Up Dependencies +------------------- + +Supported Operating Systems +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities. + +Windows is gaining support through Docker as described in the :doc:`windows` section. + +Install Java +~~~~~~~~~~~~ + +The Dataverse Software requires Java 11. + +We suggest downloading OpenJDK from https://adoptopenjdk.net + +On Linux, you are welcome to use the OpenJDK available from package managers. + +Install Netbeans or Maven +~~~~~~~~~~~~~~~~~~~~~~~~~ + +NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. + +Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section. + +Install Homebrew (Mac Only) +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +On Mac, install Homebrew to simplify the steps below: https://brew.sh + +Clone the Dataverse Software Git Repo +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Fork https://github.com/IQSS/dataverse and then clone your fork like this: + +``git clone git@github.com:[YOUR GITHUB USERNAME]/dataverse.git`` + +Build the Dataverse Software War File +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you installed Netbeans, follow these steps: + +- Launch Netbeans and click "File" and then "Open Project". Navigate to where you put the Dataverse Software code and double-click "Dataverse" to open the project. +- If you see "resolve project problems," go ahead and let Netbeans try to resolve them. This will probably include downloading dependencies, which can take a while. +- Allow Netbeans to install nb-javac (required for Java 8 and below). +- Select "Dataverse" under Projects and click "Run" in the menu and then "Build Project (Dataverse)". Check back for "BUILD SUCCESS" at the end. + +If you installed Maven instead of Netbeans, run ``mvn package``. Check for "BUILD SUCCESS" at the end. + +NOTE: Do you use a locale different than ``en_US.UTF-8`` on your development machine? Are you in a different timezone +than Harvard (Eastern Time)? You might experience issues while running tests that were written with these settings +in mind. The Maven ``pom.xml`` tries to handle this for you by setting the locale to ``en_US.UTF-8`` and timezone +``UTC``, but more, not yet discovered, build or test problems might lurk in the shadows. + +Install jq +~~~~~~~~~~ + +On Mac, run this command: + +``brew install jq`` + +On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/ + +Install Payara +~~~~~~~~~~~~~~ + +Payara 6.2023.8 or higher is required.
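+
+Once Payara is installed (see the commands below), a quick sanity check — assuming the default install location used in this guide — is to ask ``asadmin`` for its version:
+
+``/usr/local/payara6/bin/asadmin version``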
+ +To install Payara, run the following commands: + +``cd /usr/local`` + +``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip`` + +``sudo unzip payara-6.2023.8.zip`` + +``sudo chown -R $USER /usr/local/payara6`` + +If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/ + +Install Service Dependencies Directly on localhost +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Install PostgreSQL +^^^^^^^^^^^^^^^^^^ + +The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required. + +On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password". + +After installation is complete, make a backup of the ``pg_hba.conf`` file like this: + +``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig`` + +Then edit ``pg_hba.conf`` with an editor such as vi: + +``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf`` + +In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password. + +In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled". + +Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password. + +On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL. + +Install Solr +^^^^^^^^^^^^ + +`Solr `_ 9.3.0 is required. + +To install Solr, execute the following commands: + +``sudo mkdir /usr/local/solr`` + +``sudo chown $USER /usr/local/solr`` + +``cd /usr/local/solr`` + +``curl -O http://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz`` + +``tar xvfz solr-9.3.0.tgz`` + +``cd solr-9.3.0/server/solr`` + +``cp -r configsets/_default collection1`` + +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema.xml`` + +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema_dv_mdb_fields.xml`` + +``mv schema*.xml collection1/conf`` + +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/solrconfig.xml`` + +``mv solrconfig.xml collection1/conf/solrconfig.xml`` + +``cd /usr/local/solr/solr-9.3.0`` + +(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.) 
+ +``bin/solr start -j "-Djetty.host=127.0.0.1"`` + +``bin/solr create_core -c collection1 -d server/solr/collection1/conf`` + +Install Service Dependencies Using Docker Compose +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +To avoid having to install service dependencies like PostgreSQL or Solr directly on your localhost, there is the alternative of using the ``docker-compose-dev.yml`` file available in the repository root. For this option you need to have Docker and Docker Compose installed on your machine. + +The ``docker-compose-dev.yml`` can be configured to only run the service dependencies necessary to support a Dataverse installation running directly on localhost. In addition to PostgreSQL and Solr, it also runs an SMTP server. + +Before running the Docker Compose file, you need to update the value of the ``DATAVERSE_DB_USER`` environment variable to ``postgres``. The variable can be found inside the ``.env`` file in the repository root. This step is required as the Dataverse installation script expects that database user. + +To run the Docker Compose file, go to the Dataverse repository root, then run: + +``docker-compose -f docker-compose-dev.yml up -d --scale dev_dataverse=0`` + +Note that this command omits the Dataverse container defined in the Docker Compose file, since Dataverse is going to be installed directly on localhost in the next section. + +The command runs the containers in detached mode, but if you want to run them attached and thus view container logs in real time, remove the ``-d`` option from the command. + +Data volumes of each dependency will be persisted inside the ``docker-dev-volumes`` folder, inside the repository root. + +If you want to stop the containers, then run (for detached mode only, otherwise use ``Ctrl + C``): + +``docker-compose -f docker-compose-dev.yml stop`` + +If you want to remove the containers, then run: + +``docker-compose -f docker-compose-dev.yml down`` + +If you want to run a single container (the mail server, for example) then run: + +``docker-compose -f docker-compose-dev.yml up dev_smtp`` + +For a fresh installation, and before running the Software Installer Script, it is recommended to delete the ``docker-dev-volumes`` folder to avoid installation problems due to existing data in the containers. + +Run the Dataverse Software Installer Script +------------------------------------------- + +Navigate to the directory where you cloned the Dataverse Software git repo and change directories to the ``scripts/installer`` directory like this: + +``cd scripts/installer`` + +Create a Python virtual environment, activate it, then install dependencies: + +``python3 -m venv venv`` + +``source venv/bin/activate`` + +``pip install psycopg2-binary`` + +The installer will try to connect to the SMTP server you tell it to use. If you haven't used the Docker Compose option for setting up the dependencies, or you don't have a mail server handy, you can run ``nc -l 25`` in another terminal and choose "localhost" (the default) to get past this check.
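+
+Before moving on to the installer, an optional sanity check — assuming you used the Docker Compose option above — is to confirm the dependency containers are up:
+
+``docker-compose -f docker-compose-dev.yml ps``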
+ +Finally, run the installer (see also :download:`README_python.txt <../../../../scripts/installer/README_python.txt>` if necessary): + +``python3 install.py`` + +Verify the Dataverse Software is Running +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After the script has finished, you should be able to log into your Dataverse installation with the following credentials: + +- http://localhost:8080 +- username: dataverseAdmin +- password: admin + +Configure Your Development Environment for Publishing +----------------------------------------------------- + +Run the following command: + +``curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE`` + +This will disable DOI registration by using a fake (in-code) DOI provider. Please note that this feature is only available in Dataverse Software 4.10+ and that at present, the UI will give no indication that the DOIs thus minted are fake. + +Developers may also wish to consider using :ref:`PermaLinks ` + +Configure Your Development Environment for GUI Edits +---------------------------------------------------- + +Out of the box, a JSF setting is configured for production use and prevents edits to the GUI (xhtml files) from being visible unless you do a full deployment. + +It is recommended that you run the following command so that simply saving the xhtml file in Netbeans is enough for the change to show up. + +``asadmin create-system-properties "dataverse.jsf.refresh-period=1"`` + +For more on JSF settings like this, see :ref:`jsf-config`. + +Next Steps +---------- + +If you can log in to the Dataverse installation, great! If not, please see the :doc:`troubleshooting` section. For further assistance, please see "Getting Help" in the :doc:`intro` section. + +You're almost ready to start hacking on code. Now that the installer script has you up and running, you need to continue on to the :doc:`tips` section to get set up to deploy code from your IDE or the command line. + +---- + +Previous: :doc:`intro` | Next: :doc:`tips` diff --git a/doc/sphinx-guides/source/developers/containers.rst b/doc/sphinx-guides/source/developers/containers.rst index b42f7f5a2e2..175b178b455 100755 --- a/doc/sphinx-guides/source/developers/containers.rst +++ b/doc/sphinx-guides/source/developers/containers.rst @@ -25,11 +25,6 @@ The primary community-lead projects (which the core team is drawing inspiration - https://github.com/IQSS/dataverse-docker - https://github.com/IQSS/dataverse-kubernetes (especially the https://github.com/EOSC-synergy/dataverse-kubernetes fork) -Deprecated Projects -------------------- - -The :doc:`testing` section mentions using docker-aio for integration tests. We do not plan to keep this project alive. - Using Containers for Reproducible Research ------------------------------------------ diff --git a/doc/sphinx-guides/source/developers/debugging.rst b/doc/sphinx-guides/source/developers/debugging.rst index 2088afe5521..50e8901b1ff 100644 --- a/doc/sphinx-guides/source/developers/debugging.rst +++ b/doc/sphinx-guides/source/developers/debugging.rst @@ -20,8 +20,8 @@ during development without recompiling. Changing the options will require at lea how you get these options in. (Variable substitution only happens during deployment and when using system properties or environment variables, you'll need to pass these into the domain, which usually will require an app server restart.) 
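For example, a minimal sketch of passing a setting into the running domain as a system property, using ``dataverse.fqdn`` purely as an illustration (any MicroProfile-readable option works the same way):

``./asadmin create-system-properties "dataverse.fqdn=localhost"``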
-Please note that since Payara 5.2021.1 supporting MicroProfile Config 2.0, you can -`use profiles `_ +Please note you can use +`MicroProfile Config `_ to maintain your settings more easily for different environments. .. list-table:: diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index b3f7fb1c1af..1301994cc82 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -2,263 +2,81 @@ Development Environment ======================= -These instructions are purposefully opinionated and terse to help you get your development environment up and running as quickly as possible! Please note that familiarity with running commands from the terminal is assumed. +These instructions are oriented around Docker but the "classic" instructions we used for Dataverse 4 and 5 are still available at :doc:`classic-dev-env`. .. contents:: |toctitle| :local: -Quick Start (Docker) --------------------- +.. _container-dev-quickstart: -The quickest way to get Dataverse running is in Docker as explained in :doc:`../container/dev-usage` section of the Container Guide. - - -Classic Dev Environment ------------------------ - -Since before Docker existed, we have encouraged installing Dataverse and all its dependencies directly on your development machine, as described below. This can be thought of as the "classic" development environment for Dataverse. +Quickstart +---------- -However, in 2023 we decided that we'd like to encourage all developers to start using Docker instead and opened https://github.com/IQSS/dataverse/issues/9616 to indicate that we plan to rewrite this page to recommend the use of Docker. +First, install Java 17, Maven, and Docker. -There's nothing wrong with the classic instructions below and we don't plan to simply delete them. They are a valid alternative to running Dataverse in Docker. We will likely move them to another page. +After cloning the `dataverse repo `_, run this: -Set Up Dependencies -------------------- +``mvn -Pct clean package docker:run`` -Supported Operating Systems -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +After some time you should be able to log in: -Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities. +- url: http://localhost:8080 +- username: dataverseAdmin +- password: admin1 -Windows is gaining support through Docker as described in the :doc:`windows` section. +Detailed Steps +-------------- Install Java ~~~~~~~~~~~~ -The Dataverse Software requires Java 11. +The Dataverse Software requires Java 17. -We suggest downloading OpenJDK from https://adoptopenjdk.net +On Mac and Windows, we suggest downloading OpenJDK from https://adoptium.net (formerly `AdoptOpenJDK `_) or `SDKMAN `_. On Linux, you are welcome to use the OpenJDK available from package managers. -Install Netbeans or Maven -~~~~~~~~~~~~~~~~~~~~~~~~~ +Install Maven +~~~~~~~~~~~~~ -NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. +Follow instructions at https://maven.apache.org -Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section. 
+Install and Start Docker +~~~~~~~~~~~~~~~~~~~~~~~~ -Install Homebrew (Mac Only) -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Follow instructions at https://www.docker.com -On Mac, install Homebrew to simplify the steps below: https://brew.sh +Be sure to start Docker. -Clone the Dataverse Software Git Repo -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Git Clone Repo +~~~~~~~~~~~~~~ Fork https://github.com/IQSS/dataverse and then clone your fork like this: ``git clone git@github.com:[YOUR GITHUB USERNAME]/dataverse.git`` -Build the Dataverse Software War File -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If you installed Netbeans, follow these steps: - -- Launch Netbeans and click "File" and then "Open Project". Navigate to where you put the Dataverse Software code and double-click "Dataverse" to open the project. -- If you see "resolve project problems," go ahead and let Netbeans try to resolve them. This will probably including downloading dependencies, which can take a while. -- Allow Netbeans to install nb-javac (required for Java 8 and below). -- Select "Dataverse" under Projects and click "Run" in the menu and then "Build Project (Dataverse)". Check back for "BUILD SUCCESS" at the end. - -If you installed Maven instead of Netbeans, run ``mvn package``. Check for "BUILD SUCCESS" at the end. - -NOTE: Do you use a locale different than ``en_US.UTF-8`` on your development machine? Are you in a different timezone -than Harvard (Eastern Time)? You might experience issues while running tests that were written with these settings -in mind. The Maven ``pom.xml`` tries to handle this for you by setting the locale to ``en_US.UTF-8`` and timezone -``UTC``, but more, not yet discovered building or testing problems might lurk in the shadows. - -Install jq -~~~~~~~~~~ - -On Mac, run this command: - -``brew install jq`` - -On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/ - -Install Payara -~~~~~~~~~~~~~~ - -Payara 5.2022.3 or higher is required. - -To install Payara, run the following commands: - -``cd /usr/local`` - -``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/5.2022.3/payara-5.2022.3.zip`` - -``sudo unzip payara-5.2022.3.zip`` - -``sudo chown -R $USER /usr/local/payara5`` - -If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/ - -Install Service Dependencies Directly on localhost -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Install PostgreSQL -^^^^^^^^^^^^^^^^^^ - -The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required. - -On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password". - -After installation is complete, make a backup of the ``pg_hba.conf`` file like this: - -``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig`` - -Then edit ``pg_hba.conf`` with an editor such as vi: - -``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf`` - -In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password. 
- -In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled". - -Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password. - -On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL. - -Install Solr -^^^^^^^^^^^^ - -`Solr `_ 8.11.1 is required. - -To install Solr, execute the following commands: - -``sudo mkdir /usr/local/solr`` - -``sudo chown $USER /usr/local/solr`` - -``cd /usr/local/solr`` - -``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` - -``tar xvfz solr-8.11.1.tgz`` - -``cd solr-8.11.1/server/solr`` - -``cp -r configsets/_default collection1`` +Build and Run +~~~~~~~~~~~~~ -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema.xml`` +Change into the ``dataverse`` directory you just cloned and run the following command: -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema_dv_mdb_fields.xml`` +``mvn -Pct clean package docker:run`` -``mv schema*.xml collection1/conf`` +Verify +~~~~~~ -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/solrconfig.xml`` +After some time you should be able to log in: -``mv solrconfig.xml collection1/conf/solrconfig.xml`` - -``cd /usr/local/solr/solr-8.11.1`` - -(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.) - -``bin/solr start -j "-Djetty.host=127.0.0.1"`` - -``bin/solr create_core -c collection1 -d server/solr/collection1/conf`` - -Install Service Dependencies Using Docker Compose -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To avoid having to install service dependencies like PostgreSQL or Solr directly on your localhost, there is the alternative of using the ``docker-compose-dev.yml`` file available in the repository root. For this option you need to have Docker and Docker Compose installed on your machine. - -The ``docker-compose-dev.yml`` can be configured to only run the service dependencies necessary to support a Dataverse installation running directly on localhost. In addition to PostgreSQL and Solr, it also runs a SMTP server. - -Before running the Docker Compose file, you need to update the value of the ``DATAVERSE_DB_USER`` environment variable to ``postgres``. The variable can be found inside the ``.env`` file in the repository root. This step is required as the Dataverse installation script expects that database user. - -To run the Docker Compose file, go to the Dataverse repository root, then run: - -``docker-compose -f docker-compose-dev.yml up -d --scale dev_dataverse=0`` - -Note that this command omits the Dataverse container defined in the Docker Compose file, since Dataverse is going to be installed directly on localhost in the next section. 
- -The command runs the containers in detached mode, but if you want to run them attached and thus view container logs in real time, remove the ``-d`` option from the command. - -Data volumes of each dependency will be persisted inside the ``docker-dev-volumes`` folder, inside the repository root. - -If you want to stop the containers, then run (for detached mode only, otherwise use ``Ctrl + C``): - -``docker-compose -f docker-compose-dev.yml stop`` - -If you want to remove the containers, then run: - -``docker-compose -f docker-compose-dev.yml down`` - -If you want to run a single container (the mail server, for example) then run: - -``docker-compose -f docker-compose-dev.yml up dev_smtp`` - -For a fresh installation, and before running the Software Installer Script, it is recommended to delete the docker-dev-env folder to avoid installation problems due to existing data in the containers. - -Run the Dataverse Software Installer Script -------------------------------------------- - -Navigate to the directory where you cloned the Dataverse Software git repo change directories to the ``scripts/installer`` directory like this: - -``cd scripts/installer`` - -Create a Python virtual environment, activate it, then install dependencies: - -``python3 -m venv venv`` - -``source venv/bin/activate`` - -``pip install psycopg2-binary`` - -The installer will try to connect to the SMTP server you tell it to use. If you haven't used the Docker Compose option for setting up the dependencies, or you don't have a mail server handy, you can run ``nc -l 25`` in another terminal and choose "localhost" (the default) to get past this check. - -Finally, run the installer (see also :download:`README_python.txt <../../../../scripts/installer/README_python.txt>` if necessary): - -``python3 install.py`` - -Verify the Dataverse Software is Running -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -After the script has finished, you should be able to log into your Dataverse installation with the following credentials: - -- http://localhost:8080 +- url: http://localhost:8080 - username: dataverseAdmin -- password: admin - -Configure Your Development Environment for Publishing ------------------------------------------------------ - -Run the following command: - -``curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE`` - -This will disable DOI registration by using a fake (in-code) DOI provider. Please note that this feature is only available in Dataverse Software 4.10+ and that at present, the UI will give no indication that the DOIs thus minted are fake. - -Developers may also wish to consider using :ref:`PermaLinks ` - -Configure Your Development Environment for GUI Edits ----------------------------------------------------- - -Out of the box, a JSF setting is configured for production use and prevents edits to the GUI (xhtml files) from being visible unless you do a full deployment. - -It is recommended that you run the following command so that simply saving the xhtml file in Netbeans is enough for the change to show up. - -``asadmin create-system-properties "dataverse.jsf.refresh-period=1"`` - -For more on JSF settings like this, see :ref:`jsf-config`. - -Next Steps ----------- +- password: admin1 -If you can log in to the Dataverse installation, great! If not, please see the :doc:`troubleshooting` section. For further assistance, please see "Getting Help" in the :doc:`intro` section. +More Information +---------------- -You're almost ready to start hacking on code. 
Now that the installer script has you up and running, you need to continue on to the :doc:`tips` section to get set up to deploy code from your IDE or the command line. +See also the :doc:`/container/dev-usage` section of the Container Guide. ----- +Getting Help +------------ -Previous: :doc:`intro` | Next: :doc:`tips` +Please feel free to reach out at https://chat.dataverse.org or https://groups.google.com/g/dataverse-dev if you have any difficulty setting up a dev environment! diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index d70b682fcda..c77ddc13519 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -41,4 +41,5 @@ Developer Guide dataset-migration-api workflows fontcustom + classic-dev-env diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index 7f4e8c1ba34..4a64c407fc1 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -52,7 +52,9 @@ Related Guides If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. If you have front-end UI questions, please see the :doc:`/style/index`. -If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). +If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. + +If you are a Docker enthusiast, please check out the :doc:`/container/index`. Related Projects ---------------- diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index a3c0d10dc5e..8eaa5c0d7f8 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -30,15 +30,13 @@ Full Setup The recommended way to work on the Make Data Count feature is to spin up an EC2 instance that has both the Dataverse Software and Counter Processor installed. Go to the :doc:`deployment` page for details on how to spin up an EC2 instance and make sure that your Ansible file is configured to install Counter Processor before running the "create" script. -(Alternatively, you can try installing Counter Processor in Vagrant. :download:`setup-counter-processor.sh <../../../../scripts/vagrant/setup-counter-processor.sh>` might help you get it installed.) - After you have spun up your EC2 instance, set ``:MDCLogPath`` so that the Dataverse installation creates a log for Counter Processor to operate on. For more on this database setting, see the :doc:`/installation/config` section of the Installation Guide. Next you need to have the Dataverse installation add some entries to the log that Counter Processor will operate on. To do this, click on some published datasets and download some files. -Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place.
As a starting point use :download:`counter-processor-config.yaml <../../../../scripts/vagrant/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings: +Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../_static/developers/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings: -- ``log_name_pattern`` You might want something like ``/usr/local/payara5/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log`` +- ``log_name_pattern`` You might want something like ``/usr/local/payara6/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log`` - ``year_month`` You should probably set this to the current month. - ``output_file`` This needs to be a directory that the "dataverse" Unix user can read but that the "counter" user can write to. In dev, you can probably get away with "/tmp" as the directory. - ``platform`` Out of the box from Counter Processor this is set to ``Dash`` but this should be changed to match the name of your Dataverse installation. Examples are "Harvard Dataverse Repository" for Harvard University or "LibraData" for the University of Virginia. diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index c228d8e20ca..acaeccf4f23 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -47,12 +47,14 @@ Writing Unit Tests with JUnit ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true. -We support both (legacy) JUnit 4.x tests (forming the majority of our tests) and -newer JUnit 5 based testing. +We support JUnit 5 based testing and require new tests written with it. +(Since Dataverse 6.0, we migrated all of our tests formerly based on JUnit 4.) -NOTE: When adding new tests, you should give JUnit 5 a go instead of adding more dependencies to JUnit 4.x. - -If writing tests is new to you, poke around existing unit tests which all end in ``Test.java`` and live under ``src/test``. Each test is annotated with ``@Test`` and should have at least one assertion which specifies the expected result. In Netbeans, you can run all the tests in it by clicking "Run" -> "Test File". From the test file, you should be able to navigate to the code that's being tested by right-clicking on the file and clicking "Navigate" -> "Go to Test/Tested class". Likewise, from the code, you should be able to use the same "Navigate" menu to go to the tests. +If writing tests is new to you, poke around existing unit tests which all end in ``Test.java`` and live under ``src/test``. +Each test is annotated with ``@Test`` and should have at least one assertion which specifies the expected result. +In Netbeans, you can run all the tests in it by clicking "Run" -> "Test File". +From the test file, you should be able to navigate to the code that's being tested by right-clicking on the file and clicking "Navigate" -> "Go to Test/Tested class". +Likewise, from the code, you should be able to use the same "Navigate" menu to go to the tests. NOTE: Please remember when writing tests checking possibly localized outputs to check against ``en_US.UTF-8`` and ``UTC`` l10n strings! 
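If you prefer the command line over Netbeans, a sketch of iterating on a single test class with Maven (``PrivateUrlUtilTest`` is an illustrative class name; substitute the test you are working on):

``mvn test -Dtest=PrivateUrlUtilTest``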
@@ -62,22 +64,24 @@ Refactoring Code to Make It Unit-Testable Existing code is not necessarily written in a way that lends itself to easy testing. Generally speaking, it is difficult to write unit tests for both JSF "backing" beans (which end in ``Page.java``) and "service" beans (which end in ``Service.java``) because they require the database to be running in order to test them. If service beans can be exercised via API they can be tested with integration tests (described below) but a good technique for making the logic testable is to move code to "util beans" (which end in ``Util.java``) that operate on Plain Old Java Objects (POJOs). ``PrivateUrlUtil.java`` is a good example of moving logic from ``PrivateUrlServiceBean.java`` to a "util" bean to make the code testable. -Parameterized Tests and JUnit Theories -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Parameterized Tests +^^^^^^^^^^^^^^^^^^^ + Oftentimes you will want to test a method multiple times with similar values. In order to avoid test bloat (writing a test for every data combination), JUnit offers Data-driven unit tests. This allows a test to be run for each set of defined data values. -JUnit 4 uses ``Parameterized.class`` and ``Theories.class``. For reference, take a look at issue https://github.com/IQSS/dataverse/issues/5619. - -JUnit 5 doesn't offer theories (see `jqwik `_ for this), but -greatly extended parameterized testing. Some guidance how to write those: +JUnit 5 offers great parameterized testing. Some guidance on how to write those: - https://junit.org/junit5/docs/current/user-guide/#writing-tests-parameterized-tests - https://www.baeldung.com/parameterized-tests-junit-5 - https://blog.codefx.org/libraries/junit-5-parameterized-tests/ -- See also some examples in our codebase. + +- See also many examples in our codebase. + +Note that JUnit 5 also offers support for custom test parameter resolvers. This helps keep tests cleaner, +as preparation can happen within an extension and the test code stays focused on the actual testing. +See https://junit.org/junit5/docs/current/user-guide/#extensions-parameter-resolution for more information. JUnit 5 Test Helper Extensions ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -116,11 +120,14 @@ In addition, there is a writeup on "The Testable Command" at https://github.com/ Running Non-Essential (Excluded) Unit Tests ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -You should be aware that some unit tests have been deemed "non-essential" and have been annotated with ``@Category(NonEssentialTests.class)`` and are excluded from the "dev" Maven profile, which is the default profile. All unit tests (that have not been annotated with ``@Ignore``), including these non-essential tests, are run from continuous integration systems such as Jenkins and GitHub Actions with the following ``mvn`` command that invokes a non-default profile: +You should be aware that some unit tests have been deemed "non-essential" and have been annotated with ``@Tag(Tags.NOT_ESSENTIAL_UNITTESTS)`` and are excluded from the "dev" Maven profile, which is the default profile. +All unit tests (that have not been annotated with ``@Disabled``), including these non-essential tests, are run from continuous integration systems such as Jenkins and GitHub Actions with the following ``mvn`` command that invokes a non-default profile: ``mvn test -P all-unit-tests`` -Generally speaking, unit tests have been flagged as non-essential because they are slow or because they require an Internet connection.
You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them. To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential. +Generally speaking, unit tests have been flagged as non-essential because they are slow or because they require an Internet connection. +You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them. +To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential. Integration Tests ----------------- @@ -173,12 +180,7 @@ Finally, run the script: Running the full API test suite using Docker ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. note:: - Sunsetting of this module is imminent.** There is no schedule yet, but expect it to go away. - Please let the `Dataverse Containerization Working Group `_ know if you are a user and - what should be preserved. - -To run the full suite of integration tests on your laptop, we recommend using the "all in one" Docker configuration described in ``conf/docker-aio/readme.md`` in the root of the repo. +To run the full suite of integration tests on your laptop, run Dataverse and its dependencies in Docker, as explained in the :doc:`/container/dev-usage` section of the Container Guide. Alternatively, you can run tests against the app server running on your laptop by following the "getting set up" steps below. @@ -308,9 +310,9 @@ To run these tests, simply call out to Maven: Measuring Coverage of Integration Tests --------------------------------------- -Measuring the code coverage of integration tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara5" as the Payara directory and "dataverse" as the Payara Unix user. +Measuring the code coverage of integration tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara6" as the Payara directory and "dataverse" as the Payara Unix user. -Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara 5. +Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara. Add jacocoagent.jar to Payara ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -329,9 +331,9 @@ Note that we are running the following commands as the user "dataverse". In shor cd local/jacoco-0.8.1 wget https://github.com/jacoco/jacoco/releases/download/v0.8.1/jacoco-0.8.1.zip unzip jacoco-0.8.1.zip - /usr/local/payara5/bin/asadmin stop-domain - cp /home/dataverse/local/jacoco-0.8.1/lib/jacocoagent.jar /usr/local/payara5/glassfish/lib - /usr/local/payara5/bin/asadmin start-domain + /usr/local/payara6/bin/asadmin stop-domain + cp /home/dataverse/local/jacoco-0.8.1/lib/jacocoagent.jar /usr/local/payara6/glassfish/lib + /usr/local/payara6/bin/asadmin start-domain Add jacococli.jar to the WAR File ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -354,21 +356,21 @@ Run this as the "dataverse" user. .. code-block:: bash - /usr/local/payara5/bin/asadmin deploy dataverse-jacoco.war + /usr/local/payara6/bin/asadmin deploy dataverse-jacoco.war -Note that after deployment the file "/usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec" exists and is empty.
+Note that after deployment the file "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" exists and is empty. Run Integration Tests ~~~~~~~~~~~~~~~~~~~~~ Note that even though you see "docker-aio" in the command below, we assume you are not necessarily running the test suite within Docker. (Some day we'll probably move this script to another directory.) For this reason, we pass the URL with the normal port (8080) that app servers run on to the ``run-test-suite.sh`` script. -Note that "/usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite. +Note that "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite. .. code-block:: bash - /usr/local/payara5/bin/asadmin stop-domain - /usr/local/payara5/bin/asadmin start-domain + /usr/local/payara6/bin/asadmin stop-domain + /usr/local/payara6/bin/asadmin start-domain git clone https://github.com/IQSS/dataverse.git cd dataverse conf/docker-aio/run-test-suite.sh http://localhost:8080 @@ -383,7 +385,7 @@ Run these commands as the "dataverse" user. The ``cd dataverse`` means that you .. code-block:: bash cd dataverse - java -jar /home/dataverse/local/jacoco-0.8.1/lib/jacococli.jar report --classfiles target/classes --sourcefiles src/main/java --html target/coverage-it/ /usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec + java -jar /home/dataverse/local/jacoco-0.8.1/lib/jacococli.jar report --classfiles target/classes --sourcefiles src/main/java --html target/coverage-it/ /usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec Read Code Coverage Report ~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -504,7 +506,6 @@ Browser-Based Testing Installation Testing ~~~~~~~~~~~~~~~~~~~~ -- Run `vagrant up` on a server to test the installer - Work with @donsizemore to automate testing of https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible Future Work on Load/Performance Testing diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index bf75a05f84e..e1ee40cafa5 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -19,20 +19,20 @@ Undeploy the war File from the Dataverse Software Installation Script Because the initial deployment of the war file was done outside of Netbeans by the Dataverse Software installation script, it's a good idea to undeploy that war file to give Netbeans a clean slate to work with. -Assuming you installed Payara in ``/usr/local/payara5``, run the following ``asadmin`` command to see the version of the Dataverse Software that the Dataverse Software installation script deployed: +Assuming you installed Payara in ``/usr/local/payara6``, run the following ``asadmin`` command to see the version of the Dataverse Software that the Dataverse Software installation script deployed: -``/usr/local/payara5/bin/asadmin list-applications`` +``/usr/local/payara6/bin/asadmin list-applications`` You will probably see something like ``dataverse-5.0 `` as the output. 
To undeploy, use whichever version you see like this: -``/usr/local/payara5/bin/asadmin undeploy dataverse-5.0`` +``/usr/local/payara6/bin/asadmin undeploy dataverse-5.0`` Now that Payara doesn't have anything deployed, we can proceed with getting Netbeans set up to deploy the code. Add Payara as a Server in Netbeans ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara5``. The defaults are fine so you can click "Next" and "Finish". +Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara6``. The defaults are fine so you can click "Next" and "Finish". Please note that if you are on a Mac, Netbeans may be unable to start Payara due to proxy settings in Netbeans. Go to the "General" tab in Netbeans preferences and click "Test connection" to see if you are affected. If you get a green checkmark, you're all set. If you get a red exclamation mark, change "Proxy Settings" to "No Proxy" and retest. A more complicated answer having to do with changing network settings is available at https://discussions.apple.com/thread/7680039?answerId=30715103022#30715103022 and the bug is also described at https://netbeans.org/bugzilla/show_bug.cgi?id=268076 @@ -117,7 +117,7 @@ Deploying With ``asadmin`` Sometimes you want to deploy code without using Netbeans or from the command line on a server you have ssh'ed into. -For the ``asadmin`` commands below, we assume you have already changed directories to ``/usr/local/payara5/glassfish/bin`` or wherever you have installed Payara. +For the ``asadmin`` commands below, we assume you have already changed directories to ``/usr/local/payara6/glassfish/bin`` or wherever you have installed Payara. There are four steps to this process: diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst index cbd27d6e8d2..a21becd14cf 100755 --- a/doc/sphinx-guides/source/developers/tools.rst +++ b/doc/sphinx-guides/source/developers/tools.rst @@ -25,21 +25,6 @@ Maven With Maven installed you can run ``mvn package`` and ``mvn test`` from the command line. It can be downloaded from https://maven.apache.org -.. _vagrant: - -Vagrant -+++++++ - -Vagrant allows you to spin up a virtual machine running the Dataverse Software on your development workstation. You'll need to install Vagrant from https://www.vagrantup.com and VirtualBox from https://www.virtualbox.org. - -We assume you have already cloned the repo from https://github.com/IQSS/dataverse as explained in the :doc:`/developers/dev-environment` section. - -From the root of the git repo (where the ``Vagrantfile`` is), run ``vagrant up`` and eventually you should be able to reach a Dataverse installation at http://localhost:8888 (the ``forwarded_port`` indicated in the ``Vagrantfile``). - -Please note that running ``vagrant up`` for the first time should run the ``downloads/download.sh`` script for you to download required software such as an app server, Solr, etc. However, these dependencies change over time so it's a place to look if ``vagrant up`` was working but later fails. - -On Windows if you see an error like ``/usr/bin/perl^M: bad interpreter`` you might need to run ``dos2unix`` on the installation scripts. 
-
 PlantUML
 ++++++++

diff --git a/doc/sphinx-guides/source/installation/advanced.rst b/doc/sphinx-guides/source/installation/advanced.rst
index a842d566595..87f2a4fd0ab 100644
--- a/doc/sphinx-guides/source/installation/advanced.rst
+++ b/doc/sphinx-guides/source/installation/advanced.rst
@@ -13,8 +13,8 @@ Multiple App Servers

 You should be conscious of the following when running multiple app servers.

 - Only one app server can be the dedicated timer server, as explained in the :doc:`/admin/timers` section of the Admin Guide.
-- When users upload a logo or footer for their Dataverse collection using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the app server the user happened to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/payara5/glassfish/domains/domain1/docroot/logos``.
-- When a sitemap is created by an app server it is written to the filesystem of just that app server. By default the sitemap is written to the directory ``/usr/local/payara5/glassfish/domains/domain1/docroot/sitemap``.
+- When users upload a logo or footer for their Dataverse collection using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the app server the user happened to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/payara6/glassfish/domains/domain1/docroot/logos``.
+- When a sitemap is created by an app server it is written to the filesystem of just that app server. By default the sitemap is written to the directory ``/usr/local/payara6/glassfish/domains/domain1/docroot/sitemap``.
 - If Make Data Count is used, its raw logs must be copied from each app server to a single instance of Counter Processor. See also the :ref:`:MDCLogPath` section in the Configuration section of this guide and the :doc:`/admin/make-data-count` section of the Admin Guide.
 - Dataset draft version logging occurs separately on each app server. See the :ref:`edit-draft-versions-logging` section in Monitoring of the Admin Guide for details.
 - Password aliases (``dataverse.db.password``, etc.) are stored per app server.

diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 8493702406b..f9fe74afc7c 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -501,7 +501,7 @@ Logging & Slow Performance

 File Storage: Using a Local Filesystem and/or Swift and/or Object Stores and/or Trusted Remote Stores
 -----------------------------------------------------------------------------------------------------

-By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara5/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\<id\>.directory`` JVM option described below.
+By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara6/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\<id\>.directory`` JVM option described below.
 A Dataverse installation can alternately store files in a Swift or S3-compatible object store, and can now be configured to support multiple stores at once. With a multi-store configuration, the location for new files can be controlled on a per-Dataverse collection basis.

@@ -975,7 +975,7 @@ All of these processes are triggered after finishing transfers over the wire and

 Before being moved there,

 - JSF Web UI uploads are stored at :ref:`${dataverse.files.uploads} `, defaulting to
-  ``/usr/local/payara5/glassfish/domains/domain1/uploads`` folder in a standard installation. This place is
+  ``/usr/local/payara6/glassfish/domains/domain1/uploads`` folder in a standard installation. This place is
   configurable and might be set to a separate disk volume where stale uploads are purged periodically.
 - API uploads are stored at the system's temporary files location indicated by the Java system property
   ``java.io.tmpdir``, defaulting to ``/tmp`` on Linux. If this location is backed by a `tmpfs `_

@@ -1053,7 +1053,7 @@ Custom Navbar Logo

 The Dataverse Software allows you to replace the default Dataverse Project icon and name branding in the navbar with your own custom logo. Note that this logo is separate from the logo used in the theme of the root Dataverse collection (see :ref:`theme`).

-The custom logo image file is expected to be small enough to fit comfortably in the navbar, no more than 50 pixels in height and 160 pixels in width. Create a ``navbar`` directory in your Payara ``logos`` directory and place your custom logo there. By default, your logo image file will be located at ``/usr/local/payara5/glassfish/domains/domain1/docroot/logos/navbar/logo.png``.
+The custom logo image file is expected to be small enough to fit comfortably in the navbar, no more than 50 pixels in height and 160 pixels in width. Create a ``navbar`` directory in your Payara ``logos`` directory and place your custom logo there. By default, your logo image file will be located at ``/usr/local/payara6/glassfish/domains/domain1/docroot/logos/navbar/logo.png``.

 Given this location for the custom logo image file, run this curl command to add it to your settings:

@@ -1518,7 +1518,7 @@ The Google Cloud Archiver also requires a key file that must be renamed to 'goog

 For example:

-``cp /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json``
+``cp /usr/local/payara6/glassfish/domains/domain1/files/googlecloudkey.json``

 .. _S3 Archiver Configuration:

@@ -1634,7 +1634,7 @@ You have a couple of options for putting an updated robots.txt file into product

 For more of an explanation of ``ProxyPassMatch`` see the :doc:`shibboleth` section.

-If you are not fronting Payara with Apache you'll need to prevent Payara from serving the robots.txt file embedded in the war file by overwriting robots.txt after the war file has been deployed. The downside of this technique is that you will have to remember to overwrite robots.txt in the "exploded" war file each time you deploy the war file, which probably means each time you upgrade to a new version of the Dataverse Software. Furthermore, since the version of the Dataverse Software is always incrementing and the version can be part of the file path, you will need to be conscious of where on disk you need to replace the file. For example, for Dataverse Software 4.6.1 the path to robots.txt may be ``/usr/local/payara5/glassfish/domains/domain1/applications/dataverse-4.6.1/robots.txt`` with the version number ``4.6.1`` as part of the path.
+If you are not fronting Payara with Apache you'll need to prevent Payara from serving the robots.txt file embedded in the war file by overwriting robots.txt after the war file has been deployed. The downside of this technique is that you will have to remember to overwrite robots.txt in the "exploded" war file each time you deploy the war file, which probably means each time you upgrade to a new version of the Dataverse Software. Furthermore, since the version of the Dataverse Software is always incrementing and the version can be part of the file path, you will need to be conscious of where on disk you need to replace the file. For example, for Dataverse Software 4.6.1 the path to robots.txt may be ``/usr/local/payara6/glassfish/domains/domain1/applications/dataverse-4.6.1/robots.txt`` with the version number ``4.6.1`` as part of the path.
 Creating a Sitemap and Submitting it to Search Engines
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -1647,7 +1647,7 @@ Create or update your sitemap by adding the following curl command to cron to ru

 This will create or update a file in the following location unless you have customized your installation directory for Payara:

-``/usr/local/payara5/glassfish/domains/domain1/docroot/sitemap/sitemap.xml``
+``/usr/local/payara6/glassfish/domains/domain1/docroot/sitemap/sitemap.xml``

 On a Dataverse installation with many datasets, the creation or updating of the sitemap can take a while. You can check Payara's server.log file for "BEGIN updateSiteMap" and "END updateSiteMap" lines to know when the process started and stopped and any errors in between.

@@ -1690,7 +1690,7 @@ When changing these values with ``asadmin``, you'll need to delete the ol

 ``./asadmin create-jvm-options "-Ddataverse.fqdn=dataverse.example.com"``

-It's also possible to change these values by stopping Payara, editing ``payara5/glassfish/domains/domain1/config/domain.xml``, and restarting Payara.
+It's also possible to change these values by stopping Payara, editing ``payara6/glassfish/domains/domain1/config/domain.xml``, and restarting Payara.

 .. _dataverse.fqdn:

@@ -1786,7 +1786,7 @@ Configure a folder to store the incoming file stream during uploads (before tran

 Please also see :ref:`temporary-file-storage` for more details. You can use an absolute path or a relative one, which is relative to the application server domain directory.

-Defaults to ``./uploads``, which resolves to ``/usr/local/payara5/glassfish/domains/domain1/uploads`` in a default
+Defaults to ``./uploads``, which resolves to ``/usr/local/payara6/glassfish/domains/domain1/uploads`` in a default
 installation. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable
 ``DATAVERSE_FILES_UPLOADS``.

@@ -3485,7 +3485,7 @@ Sets how long a cached metrics result is used before re-running the query for a

 Sets the path where the raw Make Data Count logs are stored before being processed. If not set, no logs will be created for Make Data Count. See also the :doc:`/admin/make-data-count` section of the Admin Guide.

-``curl -X PUT -d '/usr/local/payara5/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``
+``curl -X PUT -d '/usr/local/payara6/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``

 .. _:DisplayMDCMetrics:
diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst
index 8559d6ce194..021a97415e3 100755
--- a/doc/sphinx-guides/source/installation/installation-main.rst
+++ b/doc/sphinx-guides/source/installation/installation-main.rst
@@ -28,8 +28,8 @@ Unpack the zip file - this will create the directory ``dvinstall``.

 Just make sure the user running the installer has write permission to:

-- /usr/local/payara5/glassfish/lib
-- /usr/local/payara5/glassfish/domains/domain1
+- /usr/local/payara6/glassfish/lib
+- /usr/local/payara6/glassfish/domains/domain1
 - the current working directory of the installer (it currently writes its logfile there), and
 - your jvm-option specified files.dir

@@ -47,7 +47,7 @@ Follow the instructions in the text file.

 The script will prompt you for some configuration values. If this is a test/evaluation installation, it may be possible to accept the default values provided for most of the settings:

 - Internet Address of your host: localhost
-- Payara Directory: /usr/local/payara5
+- Payara Directory: /usr/local/payara6
 - Payara User: current user running the installer script
 - Administrator email address for this Dataverse installation: (none)
 - SMTP (mail) server to relay notification messages: localhost

@@ -98,7 +98,7 @@ The supplied site URL will be saved under the JVM option :ref:`dataverse.siteUrl

 **IMPORTANT:** Please note that "out of the box" the installer will configure the Dataverse installation to leave unrestricted access to the administration APIs from (and only from) localhost. Please consider the security implications of this arrangement (anyone with shell access to the server can potentially mess with your Dataverse installation). An alternative solution would be to block open access to these sensitive API endpoints completely, and to only allow requests supplying a pre-defined "unblock token" (password). If you prefer that as a solution, please consult the supplied script ``post-install-api-block.sh`` for examples on how to set it up. See also "Securing Your Installation" under the :doc:`config` section.

-The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara5/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara.
+The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara6/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara.

 .. _JHOVE: http://jhove.openpreservation.org
@@ -249,7 +249,7 @@ Deleting Uploaded Files

 The path below will depend on the value for ``dataverse.files.directory`` as described in the :doc:`config` section:

-``rm -rf /usr/local/payara5/glassfish/domains/domain1/files``
+``rm -rf /usr/local/payara6/glassfish/domains/domain1/files``

 Rerun Installer
 ^^^^^^^^^^^^^^^

diff --git a/doc/sphinx-guides/source/installation/intro.rst b/doc/sphinx-guides/source/installation/intro.rst
index 2251af7b81b..67fc774bdbd 100644
--- a/doc/sphinx-guides/source/installation/intro.rst
+++ b/doc/sphinx-guides/source/installation/intro.rst
@@ -48,7 +48,7 @@ If you've encountered a problem installing Dataverse and are ready to ask for he

 - Operating system (usually a Linux distribution) and version.
 - Output from the installer (STDOUT, STDERR).
 - The ``scripts/api/setup-all.*.log`` files left behind by the installer.
-- The ``server.log`` file from Payara (by default at ``/usr/local/payara5/glassfish/domains/domain1/logs/server.log``).
+- The ``server.log`` file from Payara (by default at ``/usr/local/payara6/glassfish/domains/domain1/logs/server.log``).

 Improving this Guide
 --------------------

diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index d95aa78bb26..d94ef9fed9d 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -19,7 +19,7 @@ We assume you plan to run your Dataverse installation on Linux and we recommend

 Java
 ----

-The Dataverse Software requires Java SE 11 (or higher).
+The Dataverse Software requires Java SE 17 (or higher).

 Installing Java
 ===============

 The Oracle JDK can be downloaded from http://www.oracle.com/technetwork/java/jav

 On a RHEL/derivative, install OpenJDK (devel version) using yum::

-        # sudo yum install java-11-openjdk
+        # sudo yum install java-17-openjdk

-If you have multiple versions of Java installed, Java 11 should be the default when ``java`` is invoked from the command line. You can test this by running ``java -version``.
+If you have multiple versions of Java installed, Java 17 should be the default when ``java`` is invoked from the command line. You can test this by running ``java -version``.

-On RHEL/derivative you can make Java 11 the default with the ``alternatives`` command, having it prompt you to select the version of Java from a list::
+On RHEL/derivative you can make Java 17 the default with the ``alternatives`` command, having it prompt you to select the version of Java from a list::

         # alternatives --config java

@@ -44,7 +44,7 @@ On RHEL/derivative you can make Java 11 the default with the ``alternatives`` co

 Payara
 ------

-Payara 5.2022.3 is recommended. Newer versions might work fine, regular updates are recommended.
+Payara 6.2023.8 is recommended. Newer versions might work fine. Regular updates are recommended.
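 If you are unsure which Payara version an existing installation is running, ``asadmin version`` will report it. For example (a sketch, assuming the ``/usr/local/payara6`` install location used throughout this guide)::

         # /usr/local/payara6/bin/asadmin version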
 Installing Payara
 =================

@@ -53,11 +53,11 @@ Installing Payara

         # useradd dataverse

-- Download and install Payara (installed in ``/usr/local/payara5`` in the example commands below)::
+- Download and install Payara (installed in ``/usr/local/payara6`` in the example commands below)::

-        # wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/5.2022.3/payara-5.2022.3.zip
-        # unzip payara-5.2022.3.zip
-        # mv payara5 /usr/local
+        # wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip
+        # unzip payara-6.2023.8.zip
+        # mv payara6 /usr/local

 If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/

@@ -65,15 +65,15 @@ If you intend to install and run Payara under a service account (and we hope you

 - Set service account permissions::

-        # chown -R root:root /usr/local/payara5
-        # chown dataverse /usr/local/payara5/glassfish/lib
-        # chown -R dataverse:dataverse /usr/local/payara5/glassfish/domains/domain1
+        # chown -R root:root /usr/local/payara6
+        # chown dataverse /usr/local/payara6/glassfish/lib
+        # chown -R dataverse:dataverse /usr/local/payara6/glassfish/domains/domain1

 After installation, you may chown the lib/ directory back to root; the installer only needs write access to copy the JDBC driver into that directory.

 - Change from ``-client`` to ``-server`` under ``-client``::

-        # vim /usr/local/payara5/glassfish/domains/domain1/config/domain.xml
+        # vim /usr/local/payara6/glassfish/domains/domain1/config/domain.xml

 This recommendation comes from http://www.c2b2.co.uk/middleware-blog/glassfish-4-performance-tuning-monitoring-and-troubleshooting.php among other places.

@@ -154,12 +154,12 @@ Configuring Database Access for the Dataverse Installation (and the Dataverse So

 Solr
 ----

-The Dataverse Software search index is powered by Solr.
+The Dataverse software search index is powered by Solr.

 Supported Versions
 ==================

-The Dataverse Software has been tested with Solr version 8.11.1. Future releases in the 8.x series are likely to be compatible; however, this cannot be confirmed until they are officially tested. Major releases above 8.x (e.g. 9.x) are not supported.
+The Dataverse software has been tested with Solr version 9.3.0. Future releases in the 9.x series are likely to be compatible. Please get in touch (:ref:`support`) if you are having trouble with a newer version.

 Installing Solr
 ===============

@@ -174,19 +174,19 @@ Become the ``solr`` user and then download and configure Solr::

   su - solr
   cd /usr/local/solr
-  wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz
-  tar xvzf solr-8.11.1.tgz
-  cd solr-8.11.1
+  wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz
+  tar xvzf solr-9.3.0.tgz
+  cd solr-9.3.0
   cp -r server/solr/configsets/_default server/solr/collection1

 You should already have a "dvinstall.zip" file that you downloaded from https://github.com/IQSS/dataverse/releases . Unzip it into ``/tmp``.
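 For example (a sketch, assuming ``dvinstall.zip`` was downloaded to the current directory)::

   unzip dvinstall.zip -d /tmp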
Then copy the files into place:: - cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf - cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf + cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf + cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf Note: The Dataverse Project team has customized Solr to boost results that come from certain indexed elements inside the Dataverse installation, for example prioritizing results from Dataverse collections over Datasets. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ```` element and its contents. If you have ideas about how this boosting could be improved, feel free to contact us through our Google Group https://groups.google.com/forum/#!forum/dataverse-dev . -A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-8.11.1/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400`` +A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400`` Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script, and the following to ``/etc/security/limits.conf``:: @@ -205,7 +205,7 @@ Solr launches asynchronously and attempts to use the ``lsof`` binary to watch fo Finally, you need to tell Solr to create the core "collection1" on startup:: - echo "name=collection1" > /usr/local/solr/solr-8.11.1/server/solr/collection1/core.properties + echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties Solr Init Script ================ diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index e2a80d030e2..d5ffb2acb66 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -7,7 +7,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) 
` -- 5.13 +- 5.14 +- `5.13 `__ - `5.12.1 `__ - `5.12 `__ - `5.11.1 `__ diff --git a/downloads/.gitignore b/downloads/.gitignore deleted file mode 100644 index 1b51bf4def7..00000000000 --- a/downloads/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -payara-5.201.zip -solr-7.3.0.tgz -weld-osgi-bundle-2.2.10.Final-glassfish4.jar -schemaSpy_5.0.0.jar diff --git a/downloads/download.sh b/downloads/download.sh deleted file mode 100755 index 7b9de0397cb..00000000000 --- a/downloads/download.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -curl -L -O https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip -curl -L -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz -curl -L -O https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -curl -s -L http://sourceforge.net/projects/schemaspy/files/schemaspy/SchemaSpy%205.0.0/schemaSpy_5.0.0.jar/download > schemaSpy_5.0.0.jar diff --git a/downloads/stata-13-test-files/Stata14TestFile.dta b/downloads/stata-13-test-files/Stata14TestFile.dta deleted file mode 100644 index 6f1c31dc798..00000000000 Binary files a/downloads/stata-13-test-files/Stata14TestFile.dta and /dev/null differ diff --git a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar b/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar deleted file mode 100644 index ee499ae4b76..00000000000 Binary files a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar and /dev/null differ diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar deleted file mode 100644 index dc41f94046f..00000000000 Binary files a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar and /dev/null differ diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 deleted file mode 100644 index 7018ea4e822..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 +++ /dev/null @@ -1 +0,0 @@ -eeef5c0dc201d1105b9529a51fa8cdab diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 deleted file mode 100644 index 97f192f3732..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1fa716d318920fd59fc63f77965d113decf97355 diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom deleted file mode 100644 index ea2e4c03f9f..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom +++ /dev/null @@ -1,8 +0,0 @@ - - - 4.0.0 - edu.harvard.iq.dvn - unf5 - 5.0 - diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 deleted file mode 100644 index a88cf2a1c02..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 +++ /dev/null @@ -1 +0,0 @@ -2df5dac09375e1e7fcd66c705d9ca2ef diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 deleted file mode 100644 index 967b977b79e..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 +++ /dev/null @@ -1 +0,0 @@ -431cd55e2e9379677d14e402dd3c474bb7be4ac9 diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar 
b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar deleted file mode 100644 index 1f8e1c3eb12..00000000000 Binary files a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar and /dev/null differ diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom deleted file mode 100644 index e3c09349172..00000000000 --- a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom +++ /dev/null @@ -1,9 +0,0 @@ - - - 4.0.0 - net.handle - handle - 8.1.1 - POM was created from install:install-file - diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar deleted file mode 100644 index b3bddd62c24..00000000000 Binary files a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar and /dev/null differ diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 deleted file mode 100644 index 576062f55a1..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 +++ /dev/null @@ -1 +0,0 @@ -b0abb2fee242c479f305f47352600bbf diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 deleted file mode 100644 index e81e8450ef0..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9643e138cb5ed2684838e4b4faa118adfb2ecb4b diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom deleted file mode 100644 index b57cd67278b..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom +++ /dev/null @@ -1,8 +0,0 @@ - - - 4.0.0 - nom.tam.fits - fits - 2012-10-25-generated - diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 deleted file mode 100644 index 777b4df3325..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 +++ /dev/null @@ -1 +0,0 @@ -23ca47c46df791f220a87cfef3b2190c diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 deleted file mode 100644 index b5f41fd1a69..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1ec9dfbbc72dc4623d309d772b804e47284ee27 diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml b/local_lib/nom/tam/fits/fits/maven-metadata.xml deleted file mode 100644 index 4fc3254df3f..00000000000 --- a/local_lib/nom/tam/fits/fits/maven-metadata.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - nom.tam.fits - fits - - 2012-10-25-generated - - 2012-10-25-generated - - 20130925190525 - - diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 deleted file mode 100644 index b6d7e4a726f..00000000000 --- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 +++ /dev/null @@ -1 +0,0 @@ -545c78160393b4c80e40377f2a7cf406 \ No newline at end of file diff --git 
a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 deleted file mode 100644 index 188cf8ae044..00000000000 --- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 +++ /dev/null @@ -1 +0,0 @@ -9cf56b8ef3f2bacdc669c2c7cdcd7cd50ed38dbb \ No newline at end of file diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index bbd02a14328..97aa4cd2792 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -164,6 +164,8 @@ RUN <org.apache.jasper.servlet.JspServlet#org.apache.jasper.servlet.JspServlet\n \n development\n false\n \n \n genStrAsCharArray\n true\n #' "${DOMAIN_DIR}/config/default-web.xml" + sed -i 's#org.glassfish.wasp.servlet.JspServlet#org.glassfish.wasp.servlet.JspServlet\n \n development\n false\n \n \n genStrAsCharArray\n true\n #' "${DOMAIN_DIR}/config/default-web.xml" # Cleanup old CA certificates to avoid unnecessary log clutter during startup ${SCRIPT_DIR}/removeExpiredCaCerts.sh # Delete generated files diff --git a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh index 205a9eda5d7..c019c09130e 100644 --- a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh +++ b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh @@ -8,6 +8,14 @@ set -euo pipefail KEYSTORE="${DOMAIN_DIR}/config/cacerts.jks" +if [ ! -r "${KEYSTORE}" ]; then + KEYSTORE="${DOMAIN_DIR}/config/cacerts.p12" + if [ ! -r "${KEYSTORE}" ]; then + echo "Could not find CA certs keystore" + exit 1 + fi +fi + keytool -list -v -keystore "${KEYSTORE}" -storepass changeit 2>/dev/null | \ grep -i 'alias\|until' > aliases.txt diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index cbda948db14..564216b3572 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -33,6 +33,7 @@ RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh # Copy the Solr config bits COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ +RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml # Copy the data from scripts/api that provide the common base setup you'd get from the installer. 
# ".dockerignore" will take care of taking only the bare necessities diff --git a/modules/container-configbaker/assembly.xml b/modules/container-configbaker/assembly.xml index f5b309175ed..3285eef510a 100644 --- a/modules/container-configbaker/assembly.xml +++ b/modules/container-configbaker/assembly.xml @@ -8,7 +8,7 @@ - conf/solr/8.11.1 + conf/solr/9.3.0 solr @@ -43,4 +43,4 @@ - \ No newline at end of file + diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 7d46f7c49b9..646d1ea8026 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,9 +131,9 @@ - 5.13 + 5.14 - 11 + 17 UTF-8 -Xdoclint:none @@ -148,9 +148,9 @@ -Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region} - 5.2022.3 + 6.2023.8 42.5.1 - 8.11.1 + 9.3.0 1.12.290 0.177.0 @@ -170,11 +170,9 @@ 1.15.0 2.10.1 - - 4.13.1 - 5.7.0 - ${junit.jupiter.version} - 2.28.2 + + 5.10.0 + 5.4.0 9.3 @@ -193,6 +191,7 @@ 3.2.1 3.4.1 1.3.0 + 3.3.0 3.1.2 1.6.13 @@ -255,6 +254,11 @@ maven-failsafe-plugin ${maven-failsafe-plugin.version} + + org.apache.maven.plugins + maven-enforcer-plugin + ${maven-enforcer-plugin.version} + org.apache.maven.plugins maven-checkstyle-plugin @@ -314,6 +318,47 @@ + + + org.apache.maven.plugins + maven-enforcer-plugin + + + no-junit4 + generate-test-resources + + enforce + + + + + + junit:junit:*:*:test + org.junit:junit:*:*:test + org.junit.vintage:*:*:*:test + + + + + + + general-reqs + + enforce + + initialize + + + + + [${target.java.version}.0,) + + + + + + + oss-sonatype oss-sonatype + + https://oss.sonatype.org/content/repositories/snapshots/ + + + true + + + + s01-oss-sonatype + s01-oss-sonatype https://s01.oss.sonatype.org/content/repositories/snapshots/ @@ -385,7 +440,7 @@ true - --> + @@ -393,11 +448,9 @@ ct - 5.2022.5 + diff --git a/modules/dataverse-spi/pom.xml b/modules/dataverse-spi/pom.xml index 6235d309e89..b00053fe5e0 100644 --- a/modules/dataverse-spi/pom.xml +++ b/modules/dataverse-spi/pom.xml @@ -13,7 +13,7 @@ io.gdcc dataverse-spi - 1.0.0${project.version.suffix} + 2.0.0${project.version.suffix} jar Dataverse SPI Plugin API diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index 228992c8288..d039ac39e8f 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -3,8 +3,8 @@ import java.io.InputStream; import java.util.Optional; -import javax.json.JsonArray; -import javax.json.JsonObject; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; /** * Provides all the metadata Dataverse has about a given dataset that can then diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java index 9afe7ba1cfd..3c3fa35c69d 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java @@ -1,6 +1,6 @@ package io.gdcc.spi.export; -import javax.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MediaType; /** * XML Exporter is an extension of the base Exporter interface that adds the diff --git a/pom.xml b/pom.xml index 96f598af0f5..1258761dd4c 100644 --- a/pom.xml +++ b/pom.xml @@ -96,7 +96,7 @@ io.gdcc 
sword2-server - 1.2.1 + 2.0.0 @@ -119,7 +119,7 @@ com.apicatalog titanium-json-ld - 1.3.0-SNAPSHOT + 1.3.2 com.google.code.gson @@ -160,12 +160,20 @@ flyway-core ${flyway.version} + + + org.eclipse.persistence + org.eclipse.persistence.jpa + provided + com.google.guava guava 29.0-jre jar + + org.eclipse.microprofile.config microprofile-config-api @@ -174,13 +182,14 @@ jakarta.platform jakarta.jakartaee-api - ${jakartaee-api.version} provided - + + + - org.glassfish - jakarta.json + org.eclipse.angus + angus-activation provided @@ -188,12 +197,20 @@ fish.payara.api payara-api provided + + ${payara.version} + + + - com.sun.mail - jakarta.mail + org.eclipse.parsson + jakarta.json provided + + org.glassfish jakarta.faces @@ -203,6 +220,7 @@ org.primefaces primefaces 11.0.0 + jakarta org.primefaces.themes @@ -212,9 +230,10 @@ org.omnifaces omnifaces - 3.8 + 4.0-M13 + jakarta.validation jakarta.validation-api @@ -225,9 +244,12 @@ hibernate-validator provided + + + - org.glassfish - jakarta.el + org.glassfish.expressly + expressly provided @@ -261,29 +283,23 @@ org.apache.solr solr-solrj - 8.11.1 + 9.3.0 colt colt 1.2.0 - + - nom.tam.fits - fits - 2012-10-25-generated + gov.nasa.gsfc.heasarc + nom-tam-fits + 1.18.0 net.handle - handle - 8.1.1 - - - - edu.harvard.iq.dvn - unf5 - 5.0 + handle-client + 9.3.1 @@ -343,18 +359,24 @@ org.ocpsoft.rewrite rewrite-servlet - 3.5.0.Final + 6.0.0-SNAPSHOT org.ocpsoft.rewrite rewrite-config-prettyfaces - 3.5.0.Final + 6.0.0-SNAPSHOT edu.ucsb.nceas ezid 1.0.0 jar + + + junit + junit + + org.jsoup @@ -394,7 +416,7 @@ oauth2-oidc-sdk 10.7.1 - + io.gdcc xoai-data-provider @@ -415,15 +437,13 @@ - org.glassfish.jersey.containers - jersey-container-servlet - 2.23.2 + org.glassfish.jersey.core + jersey-server org.glassfish.jersey.media jersey-media-multipart - 2.23.2 com.mashape.unirest @@ -513,7 +533,7 @@ io.gdcc dataverse-spi - 1.0.0 + 2.0.0 @@ -522,18 +542,6 @@ ${junit.jupiter.version} test - - junit - junit - ${junit.version} - test - - - org.junit.vintage - junit-vintage-engine - ${junit.vintage.version} - test - org.hamcrest hamcrest-library @@ -553,9 +561,9 @@ test - com.jayway.restassured + io.rest-assured rest-assured - 2.4.0 + 5.3.1 test @@ -574,6 +582,12 @@ org.testcontainers testcontainers test + + + junit + junit + + org.testcontainers @@ -761,7 +775,7 @@ true - edu.harvard.iq.dataverse.NonEssentialTests + not-essential-unittests diff --git a/scripts/dev/dev-rebuild.sh b/scripts/dev/dev-rebuild.sh index 71857b14068..9eae195b135 100755 --- a/scripts/dev/dev-rebuild.sh +++ b/scripts/dev/dev-rebuild.sh @@ -1,8 +1,9 @@ #!/bin/sh -PAYARA_DIR=/usr/local/payara5 +PAYARA_DIR=/usr/local/payara6 ASADMIN=$PAYARA_DIR/glassfish/bin/asadmin DB_NAME=dvndb DB_USER=dvnapp +export PGPASSWORD=secret echo "Checking if there is a war file to undeploy..." LIST_APP=$($ASADMIN list-applications -t) @@ -23,7 +24,7 @@ echo "Deleting ALL DATA FILES uploaded to Dataverse..." rm -rf $PAYARA_DIR/glassfish/domains/domain1/files echo "Terminating database sessions so we can drop the database..." -psql -U postgres -c " +psql -h localhost -U postgres -c " SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '$DB_NAME' @@ -31,14 +32,14 @@ WHERE pg_stat_activity.datname = '$DB_NAME' " template1 echo "Dropping the database..." -psql -U $DB_USER -c "DROP DATABASE \"$DB_NAME\"" template1 +psql -h localhost -U $DB_USER -c "DROP DATABASE \"$DB_NAME\"" template1 echo $? echo "Clearing out data from Solr..." 
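# Note: the URL below is quoted so the shell does not treat the "?" in the query string as a glob character.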
-curl http://localhost:8983/solr/collection1/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" +curl "http://localhost:8983/solr/collection1/update/json?commit=true" -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" echo "Creating a new database..." -psql -U $DB_USER -c "CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"" template1 +psql -h localhost -U $DB_USER -c "CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"" template1 echo $? echo "Starting app server..." @@ -53,7 +54,7 @@ cd scripts/api cd ../.. echo "Creating SQL sequence..." -psql -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql +psql -h localhost -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql echo "Setting DOI provider to \"FAKE\"..." curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile index d40d4d792ea..399bc65168a 100644 --- a/scripts/installer/Makefile +++ b/scripts/installer/Makefile @@ -7,7 +7,6 @@ JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml ${INSTALLER_ZIP_DIR}/update-fields.sh SOLR_CONFIG=${INSTALLER_ZIP_DIR}/solrconfig.xml PYTHON_FILES=${INSTALLER_ZIP_DIR}/README_python.txt ${INSTALLER_ZIP_DIR}/installConfig.py ${INSTALLER_ZIP_DIR}/installUtils.py ${INSTALLER_ZIP_DIR}/install.py ${INSTALLER_ZIP_DIR}/installAppServer.py ${INSTALLER_ZIP_DIR}/requirements.txt ${INSTALLER_ZIP_DIR}/default.config ${INSTALLER_ZIP_DIR}/interactive.config -INSTALL_SCRIPT=${INSTALLER_ZIP_DIR}/install installer: dvinstall.zip @@ -56,13 +55,13 @@ ${JHOVE_SCHEMA}: ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} @echo copying jhove schema file /bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} -${SOLR_SCHEMA}: ../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR} +${SOLR_SCHEMA}: ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR} @echo copying Solr schema file - /bin/cp ../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR} -${SOLR_CONFIG}: ../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR} +${SOLR_CONFIG}: ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR} @echo copying Solr config file - /bin/cp ../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR} ${PYTHON_FILES}: README_python.txt install.py installConfig.py installAppServer.py installUtils.py requirements.txt default.config interactive.config ${INSTALLER_ZIP_DIR} @echo copying Python installer files diff --git a/scripts/installer/README.txt b/scripts/installer/README.txt index 350a17fc00c..c3ed8211082 100644 --- a/scripts/installer/README.txt +++ b/scripts/installer/README.txt @@ -1,42 +1 @@ -The installer script (install) can be run either by a developer (inside the source tree), or by an end-user installing the Dataverse. The latter will obtain the script as part of the distribution bundle; and they will be running it inside the unzipped bundle directory. - -In the former (developer) case, the installer will be looking for the files it needs in the other directories in the source tree. -For example, the war file (once built) can be found in ../../target/. 
The name of the war file will be dataverse-{VERSION}.war, where -{VERSION} is the version number of the Dataverse, obtained from the pom file (../../pom.xml). For example, as of writing this README.txt (July 2015) the war file is ../../target/dataverse-4.1.war/ - -When building a distribution archive, the Makefile will pile all the files that the installer needs in one directory (./dvinstall here) and then zip it up. We upload the resulting zip bundle on github as the actual software release. This way the end user only gets the files they actually need to install the Dataverse app. So they can do so without pulling the entire source tree. - - -The installer script itself (the perl script ./install) knows to look for all these files in 2 places (for example, it will look for the war file in ../../target/; if it's not there, it'll assume this is a distribution bundle and look for it as ./dataverse.war) - -Here's the list of the files that the installer needs: - -the war file: -target/dataverse-{VERSION}.war - -and also: - -from scripts/installer (this directory): - -install -glassfish-setup.sh - -from scripts/api: - -setup-all.sh -setup-builtin-roles.sh -setup-datasetfields.sh -setup-dvs.sh -setup-identity-providers.sh -setup-users.sh -data (the entire directory with all its contents) - -from conf/jhove: - -jhove.conf - -SOLR schema and config files, from conf/solr/8.11.1: - -schema.xml -schema_dv_mdb_fields.xml -solrconfig.xml +See README_python.txt diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh index 49ebce059d2..fc5b378cff5 100755 --- a/scripts/installer/as-setup.sh +++ b/scripts/installer/as-setup.sh @@ -56,15 +56,15 @@ function preliminary_setup() # avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/ #./asadmin $ASADMIN_OPTS list-jvm-options - # Note that these JVM options are different for Payara5 and Glassfish4: + # Note that these JVM options are different for Payara and Glassfish4: # old Glassfish4 options: (commented out) #./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m" #./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m" #./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m" - # payara5 ships with the "-server" option already in domain.xml, so no need: + # Payara ships with the "-server" option already in domain.xml, so no need: #./asadmin $ASADMIN_OPTS delete-jvm-options -client - # new Payara5 options: (thanks to donsizemore@unc.edu) + # new Payara options: (thanks to donsizemore@unc.edu) ./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxMetaspaceSize=512m" ./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MetaspaceSize=256m" ./asadmin $ASADMIN_OPTS create-jvm-options "-Dfish.payara.classloading.delegate=false" @@ -116,6 +116,9 @@ function preliminary_setup() ./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true" + # Workaround for FISH-7722: Failed to deploy war with @Stateless https://github.com/payara/Payara/issues/6337 + ./asadmin $ASADMIN_OPTS create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED + # enable comet support ./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true" @@ -155,18 +158,18 @@ function final_setup(){ if [ "$DOCKER_BUILD" = "true" ] then - FILES_DIR="/usr/local/payara5/glassfish/domains/domain1/files" + FILES_DIR="/usr/local/payara6/glassfish/domains/domain1/files" RSERVE_HOST="localhost" 
RSERVE_PORT="6311" RSERVE_USER="rserve" RSERVE_PASS="rserve" HOST_ADDRESS="localhost\:8080" - pushd /usr/local/payara5/glassfish/bin/ + pushd /usr/local/payara6/glassfish/bin/ ./asadmin start-domain domain1 preliminary_setup - chmod -R 777 /usr/local/payara5/ - rm -rf /usr/local/payara5/glassfish/domains/domain1/generated - rm -rf /usr/local/payara5/glassfish/domains/domain1/applications + chmod -R 777 /usr/local/payara6/ + rm -rf /usr/local/payara6/glassfish/domains/domain1/generated + rm -rf /usr/local/payara6/glassfish/domains/domain1/applications popd exit 0 fi @@ -276,7 +279,7 @@ if [ ! -d "$DOMAIN_DIR" ] exit 2 fi -echo "Setting up your app. server (Payara5) to support Dataverse" +echo "Setting up your app. server (Payara) to support Dataverse" echo "Payara directory: "$GLASSFISH_ROOT echo "Domain directory: "$DOMAIN_DIR diff --git a/scripts/installer/default.config b/scripts/installer/default.config index 312dd2cb2d8..8647cd02416 100644 --- a/scripts/installer/default.config +++ b/scripts/installer/default.config @@ -1,7 +1,7 @@ [glassfish] HOST_DNS_ADDRESS = localhost GLASSFISH_USER = dataverse -GLASSFISH_DIRECTORY = /usr/local/payara5 +GLASSFISH_DIRECTORY = /usr/local/payara6 GLASSFISH_ADMIN_USER = admin GLASSFISH_ADMIN_PASSWORD = secret GLASSFISH_HEAP = 2048 diff --git a/scripts/installer/install b/scripts/installer/install deleted file mode 100755 index 2208f014606..00000000000 --- a/scripts/installer/install +++ /dev/null @@ -1,1538 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use Getopt::Long; -use Socket; -use File::Copy; - -# command line options: - -my $verbose; -my $postgresonly; -my $hostname; -my $gfuser; -my $gfdir; -my $mailserver; -my $noninteractive; -my $skipdatabasesetup; -my $force; -my $nogfpasswd; -my $admin_email; - -my ($rez) = GetOptions( - #"length=i" => \$length, # numeric - #"file=s" => \$data, # string - "verbose" => \$verbose, - "pg_only" => \$postgresonly, - "skip_db_setup" => \$skipdatabasesetup, - "hostname=s" => \$hostname, - "gfuser=s" => \$gfuser, - "gfdir=s" => \$gfdir, - "mailserver=s" => \$mailserver, - "y|yes" => \$noninteractive, - "f|force" => \$force, - "nogfpasswd" => \$nogfpasswd, - "admin_email=s" => \$admin_email, -); - -# openshift/docker-specific - name of the "pod" executing the installer: -my $pod_name = ""; -if (exists($ENV{'MY_POD_NAME'})) -{ - $pod_name = $ENV{'MY_POD_NAME'}; -} - -my $jq_exec_path = ""; -my $psql_exec_path = ""; -my $cwd; -my $WARFILE_LOCATION = "dataverse.war"; - - -my @CONFIG_VARIABLES; - -if ($postgresonly) -{ - @CONFIG_VARIABLES = - ( 'POSTGRES_SERVER', 'POSTGRES_PORT', 'POSTGRES_DATABASE', 'POSTGRES_USER', 'POSTGRES_PASSWORD', 'POSTGRES_ADMIN_PASSWORD' ); - -} -else -{ - - @CONFIG_VARIABLES = ( - 'HOST_DNS_ADDRESS', - 'GLASSFISH_USER', - 'GLASSFISH_DIRECTORY', - 'ADMIN_EMAIL', - 'MAIL_SERVER', - - 'POSTGRES_SERVER', - 'POSTGRES_PORT', - 'POSTGRES_ADMIN_PASSWORD', - 'POSTGRES_DATABASE', - 'POSTGRES_USER', - 'POSTGRES_PASSWORD', - - 'SOLR_LOCATION', - - 'RSERVE_HOST', - 'RSERVE_PORT', - 'RSERVE_USER', - 'RSERVE_PASSWORD', - - 'DOI_USERNAME', - 'DOI_PASSWORD', - 'DOI_BASEURL', - 'DOI_DATACITERESTAPIURL' - - ); -} - -my %CONFIG_DEFAULTS; - -&read_config_defaults("default.config"); - -my %CONFIG_PROMPTS; -my %CONFIG_COMMENTS; - -&read_interactive_config_values("interactive.config"); - -my $API_URL = "http://localhost:8080/api"; - -# jodbc.postgresql.org recommends 4.2 for Java 8. 
-# updated drivers may be obtained from -# https://jdbc.postgresql.org/download.html -my $postgres_jdbc = "postgresql-42.2.12.jar"; - -# 0. A few preliminary checks: - -# 0a. OS: - -my $uname_out = `uname -a`; - -my @uname_tokens = split( " ", $uname_out ); - -my $WORKING_OS; -if ( $uname_tokens[0] eq "Darwin" ) { - print "\nThis appears to be a MacOS X system; good.\n"; - # TODO: check the OS version - - $WORKING_OS = "MacOSX"; -} -elsif ( $uname_tokens[0] eq "Linux" ) { - if ( -f "/etc/redhat-release" ) { - print "\nThis appears to be a RedHat system; good.\n"; - $WORKING_OS = "RedHat"; - # TODO: check the distro version - } - else { - print "\nThis appears to be a non-RedHat Linux system;\n"; - print "this installation *may* succeed; but we're not making any promises!\n"; - $WORKING_OS = "Linux"; - } -} else { - print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n"; - print "This installer script will most likely fail. Please refer to the\n"; - print "DVN Installers Guide for more information.\n\n"; - - $WORKING_OS = "Unknown"; - - unless ($noninteractive) { - exit 0; - } - - print "(Normally we would stop right there; but since the \"--yes\" option was specified, we'll attempt to continue)\n\n"; - -} - - -# 0b. host name: - -if ($hostname) { - $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname; -} else { - my $hostname_from_cmdline = `hostname`; - chop $hostname_from_cmdline; - - $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline; -} - -# 0c. check if there is the default.config file with the pre-set configuration values: - -# read default configuration values from tab separated file "default.config" if it exists -# moved after the $hostname_from_cmdline section to avoid excessively complicating the logic -# of command line argument, automatic selection, or config file. -# -# NOTE: if the file contain any Postgres configuration (for example: "POSTGRES_USER dvnApp") -# but an environmental variable with the same name exists - THE ENV. VAR WILL WIN! (don't ask) -# (actually this is to accommodate the openshift docker deployment scenario) - -sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; - -#my $config_default_file = "default.config"; -# -#if ( -e $config_default_file ) -#{ -# print("loading default configuration values from $config_default_file\n"); -# open( my $inp_cfg, $config_default_file ); -# while( my $ln = <$inp_cfg> ) -# { -# my @xs = split('\t', $ln ); -# if ( 2 == @xs ) -# { -# my $k = $xs[0]; -# my $v = trim($xs[1]); -## if (defined $ENV{$k} && ($k eq "POSTGRES_USER" || $k eq "POSTGRES_PASSWORD")) { -## $v = $ENV{$k}; -## } -## if (defined $ENV{'POSTGRESQL_ADMIN_PASSWORD'} && $k eq "POSTGRES_ADMIN_PASSWORD") { -## $v = $ENV{'POSTGRESQL_ADMIN_PASSWORD'}; -## } -# $CONFIG_DEFAULTS{$k}=$v; -# } -# } -#} -#else -#{ -# print("using hard-coded default configuration values (no $config_default_file available)\n"); -#} - -# 0d. current OS user. (the first one we find wins) - -my $current_user = $ENV{LOGNAME} || $ENV{USER} || getpwuid($<); - -# if the username was specified on the command-line, it takes precendence: -if ($gfuser) { - print "Using CLI-specified user $gfuser.\n"; - $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $gfuser; -} - - -if (!$CONFIG_DEFAULTS{'GLASSFISH_USER'} || !$noninteractive) { - $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $current_user; - print "using $current_user.\n"; -} - - -# prefer that we not install as root. 
-unless ( $< != 0 ) { - print "####################################################################\n"; - print " It is recommended that this script not be run as root.\n"; - print " Consider creating the service account \"dataverse\", giving it ownership\n"; - print " on the glassfish/domains/domain1/ and glassfish/lib/ directories,\n"; - print " along with the JVM-specified files.dir location, and designate\n"; - print " that account to launch and run the Application Server (Payara),\n"; - print " AND use that user account to run this installer.\n"; - print "####################################################################\n"; - - unless ($noninteractive) - { - print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n"; - system "stty cbreak /dev/tty 2>&1"; - unless ($noninteractive) { - my $key = getc(STDIN); - } - system "stty -cbreak /dev/tty 2>&1"; - print "\n"; - } -} - -# ensure $gfuser exists or bail -my $gfidcmd="id $CONFIG_DEFAULTS{'GLASSFISH_USER'} > /dev/null"; -my $gfreturncode=system($gfidcmd); -if ($gfreturncode != 0) { - die "Couldn't find user $gfuser. Please ensure the account exists and is readable by the user running this installer.\n"; -} - -# 0e. the following 2 options can also be specified on the command line, and -# also take precedence over the default values that are hard-coded and/or -# provided in the default.config file: - -if ($mailserver) { - $CONFIG_DEFAULTS{'MAIL_SERVER'} = $mailserver; -} - -if ($gfdir) { - $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $gfdir; -} - -# 1. CHECK FOR SOME MANDATORY COMPONENTS (WAR FILE, ETC.) -# since we can't do anything without these things in place, better check for -# them before we go into the interactive config mode. -# (skip if this is a database-only setup) - -unless ($postgresonly) -{ -# 1a. war file: - print "\nChecking if the application .war file is available... "; - -# if this installer section is running out of the installer zip bundle directory, -# the war file will be sitting right here, named "dataverse.war": - - $WARFILE_LOCATION = "dataverse.war"; - -# but if it's not here, this is probably a personal development -# setup, so their build should be up in their source tree: - - unless ( -f $WARFILE_LOCATION ) { - my $DATAVERSE_VERSION = ""; - my $DATAVERSE_POM_FILE = "../../modules/dataverse-parent/pom.xml"; - if ( -f $DATAVERSE_POM_FILE ) - { - open DPF, $DATAVERSE_POM_FILE; - my $pom_line; - while ($pom_line=) - { - chop $pom_line; - if ($pom_line =~/^[ \t]*([0-9\.]+)<\/revision>/) - { - $DATAVERSE_VERSION=$1; - last; - } - } - close DPF; - - if ($DATAVERSE_VERSION ne "") { - $WARFILE_LOCATION = "../../target/dataverse-" . $DATAVERSE_VERSION . ".war"; - } - } - } - -# But, if the war file cannot be found in either of the 2 -# places - we'll just have to give up: - - unless ( -f $WARFILE_LOCATION ) { - print "\nWARNING: Can't find the project .war file!\n"; - print "\tAre you running the installer in the right directory?\n"; - print "\tHave you built the war file?\n"; - print "\t(if not, build the project and run the installer again)\n"; - - exit 0; - } - print " Yes, it is!\n"; - - -# 1b. check and remember the working dir: - chomp( $cwd = `pwd` ); - -# 1d. jq executable: - - my $sys_path = $ENV{'PATH'}; - my @sys_path_dirs = split( ":", $sys_path ); - - if ( $pod_name ne "start-glassfish") # Why is that again? - { - for my $sys_path_dir (@sys_path_dirs) { - if ( -x $sys_path_dir . 
"/jq" ) { - $jq_exec_path = $sys_path_dir; - last; - } - } - if ( $jq_exec_path eq "" ) { - print STDERR "\nERROR: I haven't been able to find the jq command in your PATH! Please install it from http://stedolan.github.io/jq/\n"; - exit 1; - - } - } - -} - - -# 2. INTERACTIVE CONFIG SECTION: - -print "\nWelcome to the Dataverse installer.\n"; -unless ($postgresonly) { - print "You will be guided through the process of setting up a NEW\n"; - print "instance of the dataverse application\n"; -} -else { - print "You will be guided through the process of configuring your\n"; - print "PostgreSQL database for use by the Dataverse application.\n"; -} - -my $yesno; - -unless ($noninteractive) -{ - print "\nATTENTION: As of Dataverse v.4.19, we are offering a new, experimental \n"; - print "version of the installer script, implemented in Python. It will eventually \n"; - print "replace this script (implemented in Perl). Consult the file README_python.txt \n"; - print "for more information on how to run it. \n"; - - print "\nWould you like to exit and use the new installer instead? [y/n] "; - $yesno = <>; - chop $yesno; - - while ( $yesno ne "y" && $yesno ne "n" ) { - print "Please enter 'y' or 'n'!\n"; - print "(or ctrl-C to exit the installer)\n"; - $yesno = <>; - chop $yesno; - } - - exit 0 if $yesno eq "y"; -} - -ENTERCONFIG: - -print "\n"; -print "Please enter the following configuration values:\n"; -print "(hit [RETURN] to accept the default value)\n"; -print "\n"; - -for my $ENTRY (@CONFIG_VARIABLES) -{ - my $config_prompt = $CONFIG_PROMPTS{$ENTRY}; - my $config_comment = $CONFIG_COMMENTS{$ENTRY}; - - if ( $config_comment eq '' ) - { - print $config_prompt . ": "; - print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] "; - } - else - { - print $config_prompt . $config_comment; - print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] "; - } - - my $user_entry = ""; - - # ($noninteractive means the installer is being run in the non-interactive mode; it will use - # the default values specified so far, without prompting the user for alternative values)\ - unless ($noninteractive) - { - $user_entry = <>; - chop $user_entry; - - if ( $user_entry ne "" ) { - $CONFIG_DEFAULTS{$ENTRY} = $user_entry; - } - - # for some values, we'll try to do some validation right here, in real time: - - if ($ENTRY eq 'ADMIN_EMAIL') - { - $user_entry = $CONFIG_DEFAULTS{$ENTRY}; - my $attempts = 0; - while ($user_entry !~/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}/) - { - $attempts++; - print "Please enter a valid email address: "; - $user_entry = <>; - chop $user_entry; - } - - if ($attempts) - { - print "OK, looks legit.\n"; - $CONFIG_DEFAULTS{$ENTRY} = $user_entry; - } - } - elsif ($ENTRY eq 'GLASSFISH_DIRECTORY') - { - # CHECK IF GLASSFISH DIR LOOKS OK: - print "\nChecking your Glassfish installation..."; - - my $g_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; - - - unless ( -d $g_dir . "/glassfish/domains/domain1" ) - { - while ( !( -d $g_dir . "/glassfish/domains/domain1" ) ) - { - print "\nInvalid Glassfish directory " . $g_dir . "!\n"; - print "Enter the root directory of your Glassfish installation:\n"; - print "(Or ctrl-C to exit the installer): "; - - $g_dir = <>; - chop $g_dir; - } - } - - # verify that we can write in the Glassfish directory - # (now that we are no longer requiring to run the installer as root) - - my @g_testdirs = ( "/glassfish/domains/domain1", - "/glassfish/domains/domain1/config", - "/glassfish/lib"); - - for my $test_dir (@g_testdirs) - { - if (!(-w ($g_dir . 
$test_dir))) - { - print "\n"; - die("ERROR: " . $g_dir . $test_dir . " not writable to the user running the installer! Check permissions on Payara5 hierarchy.\n"); - } - } - - - - print "$g_dir looks OK!\n"; - $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; - - } - elsif ($ENTRY eq 'MAIL_SERVER') - { - my $smtp_server = ""; - while (! &validate_smtp_server() ) - { - print "Enter a valid SMTP (mail) server:\n"; - print "(Or ctrl-C to exit the installer): "; - - $smtp_server = <>; - chop $smtp_server; - - $CONFIG_DEFAULTS{'MAIL_SERVER'} = $smtp_server unless $smtp_server eq ''; - } - - print "\nOK, we were able to establish connection to the SMTP server you have specified.\n"; - print "Please note that you *may* need to configure some extra settings before your \n"; - print "Dataverse can send email. Please consult the \"Mail Host Configuration & Authentication\"\n"; - print "section of the installation guide (http://guides.dataverse.org/en/latest/installation/installation-main.html)\n"; - print "for more information.\n"; - } - } - - print "\n"; -} - -# 2b. CONFIRM VALUES ENTERED: - -print "\nOK, please confirm what you've entered:\n\n"; - -for my $ENTRY (@CONFIG_VARIABLES) { - print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n"; -} - -if ($noninteractive) { - $yesno = "y"; -} -else { - print "\nIs this correct? [y/n] "; - $yesno = <>; - chop $yesno; -} - -while ( $yesno ne "y" && $yesno ne "n" ) { - print "Please enter 'y' or 'n'!\n"; - print "(or ctrl-C to exit the installer)\n"; - $yesno = <>; - chop $yesno; -} - -if ( $yesno eq "n" ) { - goto ENTERCONFIG; -} - -# 3. SET UP POSTGRES USER AND DATABASE - -unless($pod_name eq "start-glassfish" || $pod_name eq "dataverse-glassfish-0" || $skipdatabasesetup) { - &setup_postgres(); -# (there's no return code - if anything goes wrong, the method will exit the script, with some diagnostic messages for the user) - print "\nOK, done.\n"; - - if ($postgresonly) - { - exit 0; - } -} - -# 5. CONFIGURE PAYARA - -my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; - -my $done = &setup_appserver(); - -# Check if the App is running: - -unless (( - my $exit_code = - system( $glassfish_dir . "/bin/asadmin list-applications | grep -q '^dataverse'" ) - ) == 0 ) -{ - # If the "asadmin list-applications" has failed, it may only mean that an earlier - # "asadmin login" had failed, and asadmin is now failing to run without the user - # supplying the username and password. (And the fact that we are trying to pile the - # output to grep prevents it from providing the prompts). - # So before we give up, we'll try an alternative: - - unless (( - my $exit_code_2 = - system( "curl http://localhost:8080/robots.txt | grep -q '^User-agent'" ) - ) == 0 ) - { - print STDERR "It appears that the Dataverse application is not running...\n"; - print STDERR "Even though the \"asadmin deploy\" command had succeeded earlier.\n\n"; - print STDERR "Aborting - sorry...\n\n"; - - exit 1; - } -} - - -print "\nOK, the Dataverse application appears to be running...\n\n"; - -# Run the additional setup scripts, that populate the metadata block field values, create users -# and dataverses, etc. 
- -unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-all.sh" ) { - chdir("../api"); -} - -unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-builtin-roles.sh" && -f "setup-all.sh" ) { - print "\nERROR: Can't find the metadata and user/dataverse setup scripts!\n"; - print "\tAre you running the installer in the right directory?\n"; - exit 1; -} - -# if there's an admin_email set from arguments, replace the value in `dv-root.json` (called by `setup-all.sh`) -if ($admin_email) -{ - print "setting contact email for root dataverse to: $admin_email\n"; - set_root_contact_email( $admin_email ); -} -else -{ - print "using default contact email for root dataverse\n"; -} - -for my $script ( "setup-all.sh" ) { - # (there's only 1 setup script to run now - it runs all the other required scripts) - print "Executing post-deployment setup script " . $script . "... "; - - my $my_hostname = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'}; - - # We used to filter the supplied scripts, replacing "localhost" and the port, in - # case they are running Dataverse on a different port... Now we are simply requiring - # that the port 8080 is still configured in domain.xml when they are running the - # installer: - my $run_script; - #if ( $my_hostname ne "localhost" ) { - # system( "sed 's/localhost:8080/$my_hostname/g' < " . $script . " > tmpscript.sh; chmod +x tmpscript.sh" ); - # $run_script = "tmpscript.sh"; - #} - #else { - $run_script = $script; - #} - - unless ( my $exit_code = system( "./" . $run_script . " > $run_script.$$.log 2>&1") == 0 ) - { - print "\nERROR executing script " . $script . "!\n"; - exit 1; - } - print "done!\n"; -} - -# SOME ADDITIONAL SETTINGS THAT ARE NOT TAKEN CARE OF BY THE setup-all SCRIPT -# NEED TO BE CONFIGURED HERE: - -print "Making additional configuration changes...\n\n"; - - -# a. Configure the Admin email in the Dataverse settings: - -print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail" . "\n"; - -my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail"); -if ( $exit_code ) -{ - print "WARNING: failed to configure the admin email in the Dataverse settings!\n\n"; -} -else -{ - print "OK.\n\n"; -} - -# b. If this installation is going to be using a remote SOLR search engine service, configure its location in the settings: - -if ($CONFIG_DEFAULTS{'SOLR_LOCATION'} ne 'LOCAL') -{ - print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort" . "\n"; - my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort"); - if ( $exit_code ) - { - print "WARNING: failed to configure the location of the remote SOLR service!\n\n"; - } - else - { - print "OK.\n\n"; - } -} - - - -chdir($cwd); - -print "\n\nYou should now have a running Dataverse instance at\n"; -print " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . ":8080\n\n\n"; - -if ($WARFILE_LOCATION =~/([0-9]\.[0-9]\.[0-9])\.war$/) -{ - my $version = $1; - print "If this is a personal development installation, we recommend that you undeploy the currently-running copy \n"; - print "of the application, with the following asadmin command:\n\n"; - print "\t" . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . '/bin/asadmin undeploy dataverse-' . $version . 
"\n\n"; - print "before attempting to deploy from your development environment in NetBeans.\n\n"; -} - - -print "\nYour Dataverse has been configured to use DataCite, to register DOI global identifiers in the \n"; -print "test name space \"10.5072\" with the \"shoulder\" \"FK2\"\n"; -print "However, you have to contact DataCite (support\@datacite.org) and request a test account, before you \n"; -print "can publish datasets. Once you receive the account name and password, add them to your domain.xml,\n"; -print "as the following two JVM options:\n"; -print "\t-Ddoi.username=...\n"; -print "\t-Ddoi.password=...\n"; -print "and restart payara5\n"; -print "If this is a production Dataverse and you are planning to register datasets as \n"; -print "\"real\", non-test DOIs or Handles, consult the \"Persistent Identifiers and Publishing Datasets\"\n"; -print "section of the Installataion guide, on how to configure your Dataverse with the proper registration\n"; -print "credentials.\n\n"; - - - -# (going to skip the Rserve check; it's no longer a required, or even a recommended component) - -exit 0; - -# 9. FINALLY, CHECK IF RSERVE IS RUNNING: -print "\n\nFinally, checking if Rserve is running and accessible...\n"; - -unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'} =~ /^[0-9][0-9]*$/ ) { - print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n"; - print "defaulting to 6311.\n\n"; - - $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; -} - -my ( $rserve_iaddr, $rserve_paddr, $rserve_proto ); - -unless ( $rserve_iaddr = inet_aton( $CONFIG_DEFAULTS{'RSERVE_HOST'} ) ) { - print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n"; - print STDERR "the host you specified as your R server.\n"; - print STDERR "\nDVN can function without a working R server, but\n"; - print STDERR "much of the functionality concerning running statistics\n"; - print STDERR "and analysis on quantitative data will not be available.\n"; - print STDERR "Please consult the Installers guide for more info.\n"; - - exit 0; -} - -$rserve_paddr = sockaddr_in( $CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr ); -$rserve_proto = getprotobyname('tcp'); - -unless ( socket( SOCK, PF_INET, SOCK_STREAM, $rserve_proto ) - && connect( SOCK, $rserve_paddr ) ) -{ - print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n"; - print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n"; - print STDERR "for your R server.\n"; - print STDERR "DVN can function without a working R server, but\n"; - print STDERR "much of the functionality concerning running statistics\n"; - print STDERR "and analysis on quantitative data will not be available.\n"; - print STDERR "Please consult the \"Installing R\" section in the Installers guide\n"; - print STDERR "for more info.\n"; - - exit 0; - -} - -close(SOCK); -print "\nOK!\n"; - -# 5. CONFIGURE PAYARA -sub setup_appserver { - my $success = 1; - my $failure = 0; - - my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; - - print "\nProceeding with the app. server (Payara5) setup.\n"; - -# 5a. 
DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP: - - my $gf_heap_default = "2048m"; - my $sys_mem_total = 0; - - if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) { - # Linux - - while ( my $mline = ) { - if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) { - $sys_mem_total = $1; - } - } - - close MEMINFO; - -# TODO: Figure out how to determine the amount of memory when running in Docker -# because we're wondering if Dataverse can run in the free OpenShift Online -# offering that only gives you 1 GB of memory. Obviously, if this is someone's -# first impression of Dataverse, we want to to run well! What if you try to -# ingest a large file or perform other memory-intensive operations? For more -# context, see https://github.com/IQSS/dataverse/issues/4040#issuecomment-331282286 - if ( -e "/sys/fs/cgroup/memory/memory.limit_in_bytes" && open CGROUPMEM, "/sys/fs/cgroup/memory/memory.limit_in_bytes" ) { - print "INFO: This system has the CGROUP file /sys/fs/cgroup/memory/memory.limit_in_bytes\n"; - while ( my $limitline = ) { - ### TODO: NO, WE ARE NOT NECESSARILY IN DOCKER! - ###print "We must be running in Docker! Fancy!\n"; - # The goal of this cgroup check is for - # "Setting the heap limit for Glassfish/Payara to 750MB" - # to change to some other value, based on memory available. - print "INFO: /sys/fs/cgroup/memory/memory.limit_in_bytes: $limitline\n"; - my $limit_in_kb = $limitline / 1024; - print "INFO: CGROUP limit_in_kb = $limit_in_kb [ignoring]\n"; - # In openshift.json, notice how PostgreSQL and Solr have - # resources.limits.memory set to "256Mi". - # If you try to give the Dataverse/Glassfish container twice - # as much memory (512 MB) and allow $sys_mem_total to - # be set below, you should see the following: - # "Setting the heap limit for Glassfish to 192MB." - # FIXME: dataverse.war will not deploy with only 512 MB of memory. - # Again, the goal is 1 GB total (512MB + 256MB + 256MB) for - # Glassfish, PostgreSQL, and Solr to fit in the free OpenShift tier. - #print "setting sys_mem_total to: $limit_in_kb\n"; - #$sys_mem_total = $limit_in_kb; - } - close CGROUPMEM; - } - } - elsif ( -x "/usr/sbin/sysctl" ) - { - # MacOS X, probably... - - $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; - chop $sys_mem_total; - if ( $sys_mem_total > 0 ) { - $sys_mem_total = int( $sys_mem_total / 1024 ); - # size in kb - } - } - - if ( $sys_mem_total > 0 ) { - # setting the default heap size limit to 3/8 of the available - # amount of memory: - $gf_heap_default = ( int( $sys_mem_total / ( 8 / 3 * 1024 ) ) ); - - print "\nSetting the heap limit for Payara5 to " . $gf_heap_default . "MB. \n"; - print "You may need to adjust this setting to better suit \n"; - print "your system.\n\n"; - - #$gf_heap_default .= "m"; - - } - else - { - print "\nCould not determine the amount of memory on your system.\n"; - print "Setting the heap limit for Payara5 to 2GB. You may need \n"; - print "to adjust the value to better suit your system.\n\n"; - } - - push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; - $CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; - -# TODO: -# is the below still the case with Payara5? -# if the system has more than 4GB of memory (I believe), glassfish must -# be run with the 64 bit flag set explicitly (at least that was the case -# with the MacOS glassfish build...). Verify, and if still the case, -# add a check. 
- - print "\n*********************\n"; - print "PLEASE NOTE, SOME OF THE ASADMIN COMMANDS ARE GOING TO FAIL,\n"; - print "FOR EXAMPLE, IF A CONFIGURATION SETTING THAT WE ARE TRYING\n"; - print "TO CREATE ALREADY EXISTS; OR IF A JVM OPTION THAT WE ARE\n"; - print "DELETING DOESN'T. THESE \"FAILURES\" ARE NORMAL!\n"; - print "*********************\n\n"; - print "When/if asadmin asks you to \"Enter admin user name\",\n"; - print "it should be safe to hit return and accept the default\n"; - print "(which is \"admin\").\n"; - - print "\nPress any key to continue...\n\n"; - - unless ($noninteractive) - { - system "stty cbreak /dev/tty 2>&1"; - unless ($noninteractive) { - my $key = getc(STDIN); - } - system "stty -cbreak /dev/tty 2>&1"; - } - - print "\n"; - -# 5b. start domain, if not running: - - my $javacheck = `java -version`; - my $exitcode = $?; - unless ( $exitcode == 0 ) { - print STDERR "$javacheck\n" if $javacheck; - print STDERR "Do you have java installed?\n"; - exit 1; - } - my $DOMAIN = "domain1"; - my $DOMAIN_DOWN = - `$CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}/bin/asadmin list-domains | grep "$DOMAIN " | grep "not running"`; - print STDERR $DOMAIN_DOWN . "\n"; - if ($DOMAIN_DOWN) { - print "Trying to start domain up...\n"; - if ( $current_user eq $CONFIG_DEFAULTS{'GLASSFISH_USER'} ){ - system( $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" ); - } - else - { - system( "sudo -u $CONFIG_DEFAULTS{'GLASSFISH_USER'} " . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" ); - } - # TODO: (?) - retest that the domain is running now? - } - else - { - print "domain appears to be up...\n"; - } - -# 5c. create asadmin login, so that the user doesn't have to enter -# the username and password for every asadmin command, if -# access to :4848 is password-protected: - - system( $glassfish_dir. "/bin/asadmin login" ); - -# 5d. configure glassfish using ASADMIN commands: - - $success = &run_asadmin_script(); - -# CHECK EXIT STATUS, BARF IF SETUP SCRIPT FAILED: - - unless ($success) { - print "\nERROR! Failed to configure Payara5 domain!\n"; - print "(see the error messages above - if any)\n"; - print "Aborting...\n"; - - exit 1; - } - -# 5e. Additional config files: - - my $JHOVE_CONFIG = "jhove.conf"; - my $JHOVE_CONF_SCHEMA = "jhoveConfig.xsd"; - - - my $JHOVE_CONFIG_DIST = $JHOVE_CONFIG; - my $JHOVE_CONF_SCHEMA_DIST = $JHOVE_CONF_SCHEMA; - -# (if the installer is being run NOT as part of a distribution zipped bundle, but -# from inside the source tree - adjust the locations of the jhove config files: - - unless ( -f $JHOVE_CONFIG ) { - $JHOVE_CONFIG_DIST = "../../conf/jhove/jhove.conf"; - $JHOVE_CONF_SCHEMA_DIST = "../../conf/jhove/jhoveConfig.xsd"; - } - -# but if we can't find the files in either location, it must mean -# that they are not running the script in the correct directory - so -# nothing else left for us to do but give up: - - unless ( -f $JHOVE_CONFIG_DIST && -f $JHOVE_CONF_SCHEMA_DIST ) { - print "\nERROR! JHOVE configuration files not found in the config dir!\n"; - print "(are you running the installer in the right directory?\n"; - print "Aborting...\n"; - exit 1; - } - - print "\nCopying additional configuration files... "; - - #system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); - my $jhove_success = copy ($JHOVE_CONF_SCHEMA_DIST, $glassfish_dir . 
"/glassfish/domains/domain1/config"); - unless ($jhove_success) - { - print "\n*********************\n"; - print "ERROR: failed to copy jhove config file into " . $glassfish_dir . "/glassfish/domains/domain1/config - do you have write permission in that directory?"; - exit 1; - } - -# The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...) -# - so it may need to be readjusted here: - - if ( $glassfish_dir ne "/usr/local/payara5" ) - { - system( "sed 's:/usr/local/payara5:$glassfish_dir:g' < " . $JHOVE_CONFIG_DIST . " > " . $glassfish_dir . "/glassfish/domains/domain1/config/" . $JHOVE_CONFIG); - } - else - { - system( "/bin/cp -f " . $JHOVE_CONFIG_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); - } - - print "done!\n"; - -# 5f. check if payara is running: -# TODO. - -# 5g. DEPLOY THE APPLICATION: - - print "\nAttempting to deploy the application.\n"; - print "Command line: " . $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION . "\n"; - unless (( - my $exit_code = - system( $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION ) - ) == 0 ) - { - print STDERR "Failed to deploy the application! WAR file: " . $WARFILE_LOCATION . ".\n"; - print STDERR "(exit code: " . $exit_code . ")\n"; - print STDERR "Aborting.\n"; - exit 1; - } - - - print "Finished configuring Payara and deploying the dataverse application. \n"; - - - return $success; -} - -sub run_asadmin_script { - my $success = 1; - my $failure = 0; - - # We are going to run a standalone shell script with a bunch of asadmin - # commands to set up all the Payara components for the application. - # All the parameters must be passed to that script as environmental - # variables: - - $ENV{'GLASSFISH_ROOT'} = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; - $ENV{'GLASSFISH_DOMAIN'} = "domain1"; - $ENV{'ASADMIN_OPTS'} = ""; - $ENV{'MEM_HEAP_SIZE'} = $CONFIG_DEFAULTS{'DEF_MEM_SIZE'}; - - $ENV{'DB_PORT'} = $CONFIG_DEFAULTS{'POSTGRES_PORT'}; - $ENV{'DB_HOST'} = $CONFIG_DEFAULTS{'POSTGRES_SERVER'}; - $ENV{'DB_NAME'} = $CONFIG_DEFAULTS{'POSTGRES_DATABASE'}; - $ENV{'DB_USER'} = $CONFIG_DEFAULTS{'POSTGRES_USER'}; - $ENV{'DB_PASS'} = $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}; - - $ENV{'RSERVE_HOST'} = $CONFIG_DEFAULTS{'RSERVE_HOST'}; - $ENV{'RSERVE_PORT'} = $CONFIG_DEFAULTS{'RSERVE_PORT'}; - $ENV{'RSERVE_USER'} = $CONFIG_DEFAULTS{'RSERVE_USER'}; - $ENV{'RSERVE_PASS'} = $CONFIG_DEFAULTS{'RSERVE_PASSWORD'}; - $ENV{'DOI_BASEURL'} = $CONFIG_DEFAULTS{'DOI_BASEURL'}; - $ENV{'DOI_USERNAME'} = $CONFIG_DEFAULTS{'DOI_USERNAME'}; - $ENV{'DOI_PASSWORD'} = $CONFIG_DEFAULTS{'DOI_PASSWORD'}; - $ENV{'DOI_DATACITERESTAPIURL'} = $CONFIG_DEFAULTS{'DOI_DATACITERESTAPIURL'}; - - $ENV{'HOST_ADDRESS'} = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'}; - - my ($mail_server_host, $mail_server_port) = split (":", $CONFIG_DEFAULTS{'MAIL_SERVER'}); - - $ENV{'SMTP_SERVER'} = $mail_server_host; - - if ($mail_server_port) - { - $ENV{'SMTP_SERVER_PORT'} = $mail_server_port; - } - - $ENV{'FILES_DIR'} = - $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/glassfish/domains/" . $ENV{'GLASSFISH_DOMAIN'} . "/files"; - - system("./as-setup.sh"); - - if ($?) { - return $failure; - } - return $success; -} - -sub create_pg_hash { - my $pg_username = shift @_; - my $pg_password = shift @_; - - my $encode_line = $pg_password . 
$pg_username; - - # for Redhat: - - ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; - - my $hash; - if ( $WORKING_OS eq "MacOSX" ) { - $hash = `/bin/echo -n $encode_line | md5`; - } - else { - $hash = `/bin/echo -n $encode_line | md5sum`; - } - - chop $hash; - - $hash =~ s/ \-$//; - - if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) { - print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; - exit 1; - } - - return $hash; -} - -sub validate_smtp_server { - my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status ); - - $mail_server_status = 1; - - my $userentry = $CONFIG_DEFAULTS{'MAIL_SERVER'}; - my ($testserver, $testport) = split (":", $userentry); - - unless ( $mail_server_iaddr = inet_aton( $testserver ) ) { - print STDERR "Could not look up $testserver,\n"; - print STDERR "the host you specified as your mail server\n"; - $mail_server_status = 0; - } - - if ($mail_server_status) { - $testport = 25 unless $testport; - my $mail_server_paddr = sockaddr_in( $testport, $mail_server_iaddr ); - $mail_server_proto = getprotobyname('tcp'); - - unless ( socket( SOCK, PF_INET, SOCK_STREAM, $mail_server_proto ) - && connect( SOCK, $mail_server_paddr ) ) - { - print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; - print STDERR "the address you provided for your Mail server.\n"; - print STDERR "Please select a valid mail server, and try again.\n\n"; - - $mail_server_status = 0; - } - - close(SOCK); - } - - return $mail_server_status; -} - -# support function for set_root_contact_email -sub search_replace_file -{ - my ($infile, $pattern, $replacement, $outfile) = @_; - open (my $inp, $infile); - local $/ = undef; - my $txt = <$inp>; - close $inp; - $txt =~s/$pattern/$replacement/g; - open (my $opf, '>:encoding(UTF-8)', $outfile); - print $opf $txt; - close $opf; - return; -} -# set the email address for the default `dataverseAdmin` account -sub set_root_contact_email -{ - my ($contact_email) = @_; - my $config_json = "data/user-admin.json"; - search_replace_file($config_json,"\"email\":\"dataverse\@mailinator.com\"","\"email\":\"$contact_email\"",$config_json); - return; -} - - -sub setup_postgres { - my $pg_local_connection = 0; - my $pg_major_version = 0; - my $pg_minor_version = 0; - - -# We'll need to do a few things as the Postgres admin user; -# We'll assume the name of the admin user is "postgres". - my $POSTGRES_ADMIN_USER = "postgres"; - - - -##Handling container env - - if ($pod_name eq "start-glassfish") - { - # When we are in this openshift "start-glassfish" pod, we get all the - # Postgres configuration from the environmental variables. - print "Init container starting \n"; - $CONFIG_DEFAULTS{'POSTGRES_SERVER'} = $ENV{"POSTGRES_SERVER"} . "." . $ENV{"POSTGRES_SERVICE_HOST"}; - $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} = $ENV{"POSTGRES_DATABASE"}; - $CONFIG_DEFAULTS{'POSTGRES_USER'} = $ENV{"POSTGRES_USER"}; - $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} = $ENV{"POSTGRES_ADMIN_PASSWORD"}; - # there was a weird case of the postgres admin password option spelled differently in openshift.json - # - as "POSTGRESQL_ADMIN_PASSWORD"; I'm going to change it in openshift.json - but I'm leaving this - # next line here, just in case: (L.A. -- Sept. 
2018) - $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} = $ENV{'POSTGRESQL_ADMIN_PASSWORD'}; - $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} = $ENV{"POSTGRES_PASSWORD"}; - } - - if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' || $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq '127.0.0.1' ) - { - $pg_local_connection = 1; - } -# elsif ($postgresonly) -# { -# print "In the --pg_only mode the script can only be run LOCALLY,\n"; -# print "i.e., on the server where PostgresQL is running, with the\n"; -# print "Postgres server address as localhost - \"127.0.0.1\".\n"; -# exit 1; -# } - -#If it is executing in a container, proceed easy with this all-in-one block - - - - -# 3b. LOCATE THE psql EXECUTABLE: - - if ( $pod_name eq "start-glassfish"){ - $psql_exec_path = "/usr/bin" - } - else - { - my $sys_path = $ENV{'PATH'}; - my @sys_path_dirs = split( ":", $sys_path ); - - for my $sys_path_dir (@sys_path_dirs) { - - if ( -x $sys_path_dir . "/psql" ) { - $psql_exec_path = $sys_path_dir; - - last; - } - } - } - - my $psql_major_version = 0; - my $psql_minor_version = 0; - -# 3c. IF PSQL WAS FOUND IN THE PATH, CHECK ITS VERSION: - - unless ( $psql_exec_path eq "" ) { - open( PSQLOUT, $psql_exec_path . "/psql --version|" ); - - my $psql_version_line = ; - chop $psql_version_line; - close PSQLOUT; - - my ( $postgresName, $postgresNameLong, $postgresVersion ) = split( " ", $psql_version_line ); - - unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) { - print STDERR "\nWARNING: Unexpected output from psql command!\n"; - } - else - { - my (@psql_version_tokens) = split( '\.', $postgresVersion ); - - print "\n\nFound Postgres psql command, version $postgresVersion.\n\n"; - - $psql_major_version = $psql_version_tokens[0]; - $psql_minor_version = $psql_version_tokens[1]; - - $pg_major_version = $psql_major_version; - $pg_minor_version = $psql_minor_version; - - } - } - -# a frequent problem with MacOSX is that the copy of psql found in the PATH -# belongs to the older version of PostgresQL supplied with the OS, which happens -# to be incompatible with the newer builds from the Postgres project; which are -# recommended to be used with Dataverse. So if this is a MacOSX box, we'll -# check what other versions of PG are available, and select the highest version -# we can find: - - if ( $WORKING_OS eq "MacOSX" ) { - my $macos_pg_major_version = 0; - my $macos_pg_minor_version = 0; - - for $macos_pg_minor_version ( "9", "8", "7", "6", "5", "4", "3", "2", "1", "0" ) { - if ( -x "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin/psql" ) { - $macos_pg_major_version = 9; - if ( ( $macos_pg_major_version > $psql_major_version ) - || ( $macos_pg_minor_version >= $psql_minor_version ) ) - { - $psql_exec_path = "/Library/PostgreSQL/9." . $macos_pg_minor_version . 
"/bin"; - $pg_major_version = $macos_pg_major_version; - $pg_minor_version = $macos_pg_minor_version; - } - last; - } - } - } - - my $psql_admin_exec = ""; - - if ( $psql_exec_path eq "" ) - { - if ( $pg_local_connection || $noninteractive) - { - print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n"; - print STDERR "Please make sure PostgresQL is properly installed; if necessary, add\n"; - print STDERR "the location of psql to the PATH, then try again.\n\n"; - - exit 1; - } - else - { - print "WARNING: I haven't been able to find the psql command in your PATH!\n"; - print "But since we are configuring a Dataverse instance to use a remote Postgres server,\n"; - print "we can still set up the database by running a setup script on that remote server\n"; - print "(see below for instructions).\n"; - - } - } else { - - print "(Using psql version " . $pg_major_version . "." . $pg_minor_version . ": " . $psql_exec_path . "/psql)\n"; - - - $psql_admin_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec_path; - $psql_exec_path = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec_path; - - print "Checking if we can talk to Postgres as the admin user...\n"; - } - -# 3d. CHECK IF WE CAN TALK TO POSTGRES AS THE ADMIN: - - if ($psql_exec_path eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) - { - # No, we can't. :( - if ($pg_local_connection || $noninteractive) - { - # If Postgres is running locally, this is a fatal condition. - # We'll give them some (potentially) helpful pointers and exit. - - print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1) \n"; - print "Nope, I haven't been able to connect to the local instance of PostgresQL as the admin user.\n"; - print "\nIs postgresql running? \n"; - print " On a RedHat-like system, you can check the status of the daemon with\n\n"; - print " service postgresql start\n\n"; - print " On MacOSX, use Applications -> PostgresQL -> Start Server.\n"; - print " (or, if there's no \"Start Server\" item in your PostgresQL folder, \n"; - print " simply restart your MacOSX system!)\n"; - print "\nAlso, please make sure that the daemon is listening to network connections!\n"; - print " - at least on the localhost interface. (See \"Installing Postgres\" section\n"; - print " of the installation manual).\n"; - print "\nFinally, did you supply the correct admin password?\n"; - print " Don't know the admin password for your Postgres installation?\n"; - print " - then simply set the access level to \"trust\" temporarily (for localhost only!)\n"; - print " in your pg_hba.conf file. Again, please consult the \n"; - print " installation manual).\n"; - exit 1; - } - else - { - # If we are configuring the Dataverse instance to use a Postgres server - # running on a remote host, it is possible to configure the database - # without opening remote access for the admin user. They will simply - # have to run this script in the "postgres-only" mode on that server, locally, - # then resume the installation here: - print "(Tried executing: " . $psql_admin_exec . "/psql -h " . 
$CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1)\n\n"; - print "Haven't been able to connect to the remote Postgres server as the admin user.\n"; - print "(Or you simply don't have psql installed on this server)\n"; - print "It IS possible to configure a database for your Dataverse on a remote server,\n"; - print "without having admin access to that remote Postgres installation.\n\n"; - print "In order to do that, please copy the installer (the entire package) to the server\n"; - print "where PostgresQL is running and run the installer with the \"--pg_only\" option:\n\n"; - print " ./install --pg_only\n\n"; - - print "Press any key to continue the installation process once that has been\n"; - print "done. Or press ctrl-C to exit the installer.\n\n"; - - system "stty cbreak /dev/tty 2>&1"; - my $key = getc(STDIN); - system "stty -cbreak /dev/tty 2>&1"; - print "\n"; - } - } - else - { - print "Yes, we can!\n"; - - # ok, we can proceed with configuring things... - - print "\nConfiguring Postgres Database:\n"; - - # 4c. CHECK IF THIS DB ALREADY EXISTS: - - my $psql_command_dbcheck = - $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -c '' -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; - - if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) - { - if ($force) - { - print "WARNING! Database " - . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} - . " already exists but --force given... continuing.\n"; - } - else - { - print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n"; - - if ($noninteractive) - { - exit 1; - } - else - { - print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n"; - - system "stty cbreak /dev/tty 2>&1"; - my $key = getc(STDIN); - system "stty -cbreak /dev/tty 2>&1"; - print "\n"; - - } - } - } - - # 3e. CHECK IF THIS USER ALREADY EXISTS: - - my $psql_command_rolecheck = - $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -c '' -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; - my $exitcode; - - if ( ( $exitcode = system($psql_command_rolecheck) ) == 0 ) - { - print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n"; - print "Proceeding."; - } - else - { - # 3f. CREATE DVN DB USER: - - print "\nCreating Postgres user (role) for the DVN:\n"; - - open TMPCMD, ">/tmp/pgcmd.$$.tmp"; - - # with md5-encrypted password: - my $pg_password_md5 = - &create_pg_hash( $CONFIG_DEFAULTS{'POSTGRES_USER'}, $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} ); - my $sql_command = - "CREATE ROLE \"" - . $CONFIG_DEFAULTS{'POSTGRES_USER'} - . "\" PASSWORD 'md5" - . $pg_password_md5 - . "' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN"; - - print TMPCMD $sql_command; - close TMPCMD; - - my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1"; - - my $out = qx($psql_commandline 2>&1); - $exitcode = $?; - unless ( $exitcode == 0 ) - { - print STDERR "Could not create the DVN Postgres user role!\n"; - print STDERR "(SQL: " . $sql_command . ")\n"; - print STDERR "(psql exit code: " . $exitcode . ")\n"; - print STDERR "(STDERR and STDOUT was: " . $out . 
")\n"; - exit 1; - } - - unlink "/tmp/pgcmd.$$.tmp"; - print "done.\n"; - } - - # 3g. CREATE DVN DB: - - print "\nCreating Postgres database:\n"; - - my $psql_command = - $psql_admin_exec - . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER ." " - . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " --owner=" - . $CONFIG_DEFAULTS{'POSTGRES_USER'}; - - my $out = qx($psql_command 2>&1); - $exitcode = $?; - unless ( $exitcode == 0 ) - { - print STDERR "Could not create Postgres database for the Dataverse app!\n"; - print STDERR "(command: " . $psql_command . ")\n"; - print STDERR "(psql exit code: " . $exitcode . ")\n"; - print STDERR "(STDOUT and STDERR: " . $out . ")\n"; - if ($force) - { - print STDERR "\ncalled with --force, continuing\n"; - } - else - { - print STDERR "\naborting the installation (sorry!)\n\n"; - exit 1; - } - } - } - -# Whether the user and the database were created locally or remotely, we'll now -# verify that we can talk to that database, with the credentials of the database -# user that we want the Dataverse application to be using: - - if ( $psql_exec_path ne "" && system( $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) - { - print STDERR "Oops, haven't been able to connect to the database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ",\n"; - print STDERR "running on " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . ", as user " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . ".\n\n"; - print STDERR "Aborting the installation (sorry!)\n"; - exit 1; - } -} - -sub read_config_defaults { - my $config_file = shift @_; - - unless ( -f $config_file ) - { - print STDERR "Can't find the config file " . $config_file . "!\n"; - exit 1; - } - - open CF, $config_file || die "Can't open config file " . $config_file . " for reading.\n"; - - while () - { - chop; - - if ( $_ =~/^[A-Z]/ && $_ =~/ *= */ ) - { - my ($name, $value) = split(/ *= */, $_, 2); - $CONFIG_DEFAULTS{$name} = $value; - } - } - close CF; -} - -sub read_interactive_config_values { - my $config_file = shift @_; - - unless ( -f $config_file ) - { - print STDERR "Can't find the config file " . $config_file . "!\n"; - exit 1; - } - - open CF, $config_file || die "Can't open config file " . $config_file . " for reading.\n"; - - my $mode = ""; - - while () - { - chop; - - if ( $_ eq "[prompts]" || $_ eq "[comments]" ) - { - $mode = $_; - } - - if ( $_ =~/^[A-Z]/ && $_ =~/ *= */ ) - { - my ($name, $value) = split(/ *= */, $_, 2); - - if ( $mode eq "[prompts]" ) - { - $CONFIG_PROMPTS{$name} = $value; - } - elsif ( $mode eq "[comments]" ) - { - $value =~s/\\n/\n/g; - $CONFIG_COMMENTS{$name} = $value; - } - } - } - close CF; -} diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 5acb4d760a4..700c70dbc28 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -252,8 +252,8 @@ # 1d. check java version java_version = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT).decode() print("Found java version "+java_version) - if not re.search('(1.8|11)', java_version): - sys.exit("Dataverse requires OpenJDK 1.8 or 11. Please make sure it's in your PATH, and try again.") + if not re.search('(17)', java_version): + sys.exit("Dataverse requires OpenJDK 17. 
Please make sure it's in your PATH, and try again.") # 1e. check if the setup scripts - setup-all.sh, are available as well, maybe? # @todo (?) @@ -314,7 +314,7 @@ gfDir = config.get('glassfish', 'GLASSFISH_DIRECTORY') while not test_appserver_directory(gfDir): print("\nInvalid Payara directory!") - gfDir = read_user_input("Enter the root directory of your Payara5 installation:\n(Or ctrl-C to exit the installer): ") + gfDir = read_user_input("Enter the root directory of your Payara installation:\n(Or ctrl-C to exit the installer): ") config.set('glassfish', 'GLASSFISH_DIRECTORY', gfDir) elif option == "mail_server": mailServer = config.get('system', 'MAIL_SERVER') @@ -511,12 +511,12 @@ try: copy2(jhoveConfigSchemaDist, gfConfigDir) # The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...) - # and may need to be adjusted, if Payara is installed anywhere other than /usr/local/payara5: - if gfDir == "/usr/local/payara5": + # and may need to be adjusted, if Payara is installed anywhere other than /usr/local/payara6: + if gfDir == "/usr/local/payara6": copy2(jhoveConfigDist, gfConfigDir) else: - # use sed to replace /usr/local/payara5 in the distribution copy with the real gfDir: - sedCommand = "sed 's:/usr/local/payara5:"+gfDir+":g' < " + jhoveConfigDist + " > " + gfConfigDir + "/" + jhoveConfig + # use sed to replace /usr/local/payara6 in the distribution copy with the real gfDir: + sedCommand = "sed 's:/usr/local/payara6:"+gfDir+":g' < " + jhoveConfigDist + " > " + gfConfigDir + "/" + jhoveConfig subprocess.call(sedCommand, shell=True) print("done.") diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py index 8b719ac09d1..698f5ba9a58 100644 --- a/scripts/installer/installAppServer.py +++ b/scripts/installer/installAppServer.py @@ -3,7 +3,7 @@ def runAsadminScript(config): # We are going to run a standalone shell script with a bunch of asadmin - # commands to set up all the app. server (payara5) components for the application. + # commands to set up all the app. server (payara6) components for the application. # All the parameters must be passed to that script as environmental # variables: os.environ['GLASSFISH_DOMAIN'] = "domain1"; diff --git a/scripts/installer/installUtils.py b/scripts/installer/installUtils.py index 7cc368de5f8..ff5e6eb708d 100644 --- a/scripts/installer/installUtils.py +++ b/scripts/installer/installUtils.py @@ -57,7 +57,7 @@ def test_appserver_directory(directory): #print("version: major: "+str(major_version)+", minor: "+str(minor_version)) - if major_version != 5 or minor_version < 201: + if major_version != 6 or minor_version < 2023: return False return True diff --git a/scripts/installer/interactive.config b/scripts/installer/interactive.config index 86ea926fe5d..ef8110c554f 100644 --- a/scripts/installer/interactive.config +++ b/scripts/installer/interactive.config @@ -26,7 +26,7 @@ DOI_BASEURL = Datacite URL DOI_DATACITERESTAPIURL = Datacite REST API URL [comments] HOST_DNS_ADDRESS = :(enter numeric IP address, if FQDN is unavailable) -GLASSFISH_USER = :This user will be running the App. Server (Payara5) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n: +GLASSFISH_USER = :This user will be running the App. Server (Payara) service on your system.\n - If this is a dev. 
environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n: GLASSFISH_DIRECTORY = GLASSFISH_REQUEST_TIMEOUT = :\n Defaults to 1800 seconds (30 minutes) ADMIN_EMAIL = :\n(please enter a valid email address!) diff --git a/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh b/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh index 0cfdd20c272..367aa214563 100755 --- a/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh +++ b/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh @@ -5,7 +5,7 @@ then EC2_HTTP_LOCATION="" fi -DATAVERSE_APP_DIR=/usr/local/payara5/glassfish/domains/domain1/applications/dataverse; export DATAVERSE_APP_DIR +DATAVERSE_APP_DIR=/usr/local/payara6/glassfish/domains/domain1/applications/dataverse; export DATAVERSE_APP_DIR # restart app server diff --git a/scripts/vagrant/install-dataverse.sh b/scripts/vagrant/install-dataverse.sh deleted file mode 100644 index c9873f7d3ec..00000000000 --- a/scripts/vagrant/install-dataverse.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash - -if [ ! -z "$1" ]; then - MAILSERVER=$1 - MAILSERVER_ARG="--mailserver $MAILSERVER" -fi -WAR=/dataverse/target/dataverse*.war -if [ ! -f $WAR ]; then - echo "no war file found... building" - #echo "Installing nss on CentOS 6 to avoid java.security.KeyException while building war file: https://github.com/IQSS/dataverse/issues/2744" - #yum install -y nss - su $SUDO_USER -s /bin/sh -c "cd /dataverse && source /etc/profile.d/maven.sh && mvn -q package" -fi -cd /dataverse/scripts/installer - -# move any pre-existing `default.config` file out of the way to avoid overwriting -pid=$$ -if [ -e default.config ]; then - cp default.config tmp-${pid}-default.config -fi - -# Switch to newer Python-based installer -python3 ./install.py --noninteractive --config_file="default.config" - -if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back - mv -f tmp-${pid}-default.config default.config -fi - -echo "If "vagrant up" was successful (check output above) Dataverse is running on port 8080 of the Linux machine running within Vagrant, but this port has been forwarded to port 8088 of the computer you ran "vagrant up" on. For this reason you should go to http://localhost:8088 to see the Dataverse app running." - -echo "Please also note that the installation script has now started Payara, but has not set up an autostart mechanism for it.\nTherefore, the next time this VM is started, Payara must be started manually.\nSee https://guides.dataverse.org/en/latest/installation/prerequisites.html#launching-payara-on-system-boot for details." 
diff --git a/scripts/vagrant/rpmbuild.sh b/scripts/vagrant/rpmbuild.sh deleted file mode 100755 index f10830afb5b..00000000000 --- a/scripts/vagrant/rpmbuild.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm -yum install -y rpm-build httpd-devel libapreq2-devel R-devel diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh deleted file mode 100755 index a418e8d6251..00000000000 --- a/scripts/vagrant/setup-counter-processor.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -echo "Setting up counter-processor" -echo "Installing dependencies" -yum -y install unzip vim-enhanced -yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm -# EPEL provides Python 3.6.6, new enough (3.6.4 in .python-version) -yum -y install python36 jq -# "ensurepip" tip from https://stackoverflow.com/questions/50408941/recommended-way-to-install-pip3-on-centos7/52518512#52518512 -python3.6 -m ensurepip -# FIXME: actually use this dedicated "counter" user. -COUNTER_USER=counter -echo "Ensuring Unix user '$COUNTER_USER' exists" -useradd $COUNTER_USER || : -COMMIT='7974dad259465ba196ef639f48dea007cae8f9ac' -UNZIPPED_DIR="counter-processor-$COMMIT" -if [ ! -e $UNZIPPED_DIR ]; then - ZIP_FILE="${COMMIT}.zip" - echo "Downloading and unzipping $ZIP_FILE" - wget https://github.com/CDLUC3/counter-processor/archive/$ZIP_FILE - unzip $ZIP_FILE -fi -cd $UNZIPPED_DIR -echo "Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process." -pip3 install -r requirements.txt -# For now, parsing sample_logs/counter_2018-05-08.log -for i in `echo {00..31}`; do - # avoid errors like: No such file or directory: 'sample_logs/counter_2018-05-01.log' - touch sample_logs/counter_2018-05-$i.log -done -#LOG_GLOB="sample_logs/counter_2018-05-*.log" -#START_DATE="2018-05-08" -#END_DATE="2018-05-09" -CONFIG_FILE=/dataverse/scripts/vagrant/counter-processor-config.yaml python3.6 main.py diff --git a/scripts/vagrant/setup-solr.sh b/scripts/vagrant/setup-solr.sh deleted file mode 100755 index 70d3fc632a7..00000000000 --- a/scripts/vagrant/setup-solr.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -echo "Setting up Solr" -dnf install -qy lsof -SOLR_USER=solr -SOLR_HOME=/usr/local/solr -mkdir $SOLR_HOME -chown $SOLR_USER:$SOLR_USER $SOLR_HOME -su $SOLR_USER -s /bin/sh -c "cp /dataverse/downloads/solr-8.11.1.tgz $SOLR_HOME" -su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-8.11.1.tgz" -su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1/server/solr && cp -r configsets/_default . 
&& mv _default collection1" -su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/schema*.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/" -su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/solrconfig.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml" -su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1 && bin/solr start && bin/solr create_core -c collection1 -d server/solr/collection1/conf/" -cp /dataverse/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr /etc/init.d/solr -chmod 755 /etc/init.d/solr -/etc/init.d/solr stop -/etc/init.d/solr start -chkconfig solr on diff --git a/scripts/vagrant/setup.sh b/scripts/vagrant/setup.sh deleted file mode 100644 index 0af4afb22af..00000000000 --- a/scripts/vagrant/setup.sh +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/bash -echo "Installing dependencies for Dataverse" - -# wget seems to be missing in box 'bento/centos-8.2' -dnf install -qy wget - -# python3 and psycopg2 for the Dataverse installer -dnf install -qy python3 python3-psycopg2 - -# JQ -echo "Installing jq for the setup scripts" -dnf install -qy epel-release -dnf install -qy jq - -echo "Adding Shibboleth yum repo" -cp /dataverse/conf/vagrant/etc/yum.repos.d/shibboleth.repo /etc/yum.repos.d -# Uncomment this (and other shib stuff below) if you want -# to use Vagrant (and maybe PageKite) to test Shibboleth. -#yum install -y shibboleth shibboleth-embedded-ds - -# java configuration et alia -dnf install -qy java-11-openjdk-devel httpd mod_ssl unzip -alternatives --set java /usr/lib/jvm/jre-11-openjdk/bin/java -java -version - -# maven included in centos8 requires 1.8.0 - download binary instead -wget -q https://archive.apache.org/dist/maven/maven-3/3.8.2/binaries/apache-maven-3.8.2-bin.tar.gz -tar xfz apache-maven-3.8.2-bin.tar.gz -mkdir /opt/maven -mv apache-maven-3.8.2/* /opt/maven/ -echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > /etc/profile.d/maven.sh -echo "export M2_HOME=/opt/maven" >> /etc/profile.d/maven.sh -echo "export MAVEN_HOME=/opt/maven" >> /etc/profile.d/maven.sh -echo "export PATH=/opt/maven/bin:${PATH}" >> /etc/profile.d/maven.sh -chmod 0755 /etc/profile.d/maven.sh - -# disable centos8 postgresql module and install postgresql13-server -dnf -qy module disable postgresql -dnf install -qy https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm -dnf install -qy postgresql13-server -/usr/pgsql-13/bin/postgresql-13-setup initdb -/usr/bin/systemctl stop postgresql-13 -cp /dataverse/conf/vagrant/var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/13/data/pg_hba.conf -/usr/bin/systemctl start postgresql-13 -/usr/bin/systemctl enable postgresql-13 - -PAYARA_USER=dataverse -echo "Ensuring Unix user '$PAYARA_USER' exists" -useradd $PAYARA_USER || : -SOLR_USER=solr -echo "Ensuring Unix user '$SOLR_USER' exists" -useradd $SOLR_USER || : -DOWNLOAD_DIR='/dataverse/downloads' -PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2022.3.zip" -SOLR_TGZ="$DOWNLOAD_DIR/solr-8.11.1.tgz" -if [ ! -f $PAYARA_ZIP ] || [ ! -f $SOLR_TGZ ]; then - echo "Couldn't find $PAYARA_ZIP or $SOLR_TGZ! Running download script...." - cd $DOWNLOAD_DIR && ./download.sh && cd - echo "Done running download script." -fi -PAYARA_USER_HOME=~dataverse -PAYARA_ROOT=/usr/local/payara5 -if [ ! 
-d $PAYARA_ROOT ]; then - echo "Copying $PAYARA_ZIP to $PAYARA_USER_HOME and unzipping" - su $PAYARA_USER -s /bin/sh -c "cp $PAYARA_ZIP $PAYARA_USER_HOME" - su $PAYARA_USER -s /bin/sh -c "cd $PAYARA_USER_HOME && unzip -q $PAYARA_ZIP" - # default.config defaults to /usr/local/payara5 so let's go with that - rsync -a $PAYARA_USER_HOME/payara5/ $PAYARA_ROOT/ -else - echo "$PAYARA_ROOT already exists" -fi - -#service shibd start -/usr/bin/systemctl stop httpd -cp /dataverse/conf/httpd/conf.d/dataverse.conf /etc/httpd/conf.d/dataverse.conf -mkdir -p /var/www/dataverse/error-documents -cp /dataverse/conf/vagrant/var/www/dataverse/error-documents/503.html /var/www/dataverse/error-documents -/usr/bin/systemctl start httpd -#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /tmp/pdurbin.pagekite.me -#cp -a /etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml.orig -#cp -a /etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml.orig -# need more attributes, such as sn, givenName, mail -#cp /dataverse/conf/vagrant/etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml -# FIXME: automate this? -#curl 'https://www.testshib.org/cgi-bin/sp2config.cgi?dist=Others&hostname=pdurbin.pagekite.me' > /etc/shibboleth/shibboleth2.xml -#cp /dataverse/conf/vagrant/etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml -#service shibd restart -#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /downloads/pdurbin.pagekite.me -#service httpd restart - -echo "#########################################################################################" -echo "# This is a Vagrant test box, so we're disabling firewalld. # -echo "# Re-enable it with $ sudo systemctl enable firewalld && sudo systemctl start firewalld #" -echo "#########################################################################################" -systemctl disable firewalld -systemctl stop firewalld diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java index 2a3f2d50364..f1bfc3e290b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java @@ -3,8 +3,8 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.InputStream; -import javax.ejb.EJB; -import javax.inject.Inject; +import jakarta.ejb.EJB; +import jakarta.inject.Inject; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; @@ -222,6 +222,11 @@ public GlobalId parsePersistentId(String fullIdentifierString) { if(!isConfigured()) { return null; } + // Occasionally, the protocol separator character ':' comes in still + // URL-encoded as %3A (usually as a result of the URL having been + // encoded twice): + fullIdentifierString = fullIdentifierString.replace("%3A", ":"); + int index1 = fullIdentifierString.indexOf(':'); if (index1 > 0) { // ':' found with one or more characters before it String protocol = fullIdentifierString.substring(0, index1); diff --git a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java index 6fc7262925a..db3c6029a78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java @@ 
-3,14 +3,14 @@ import java.io.Serializable; import java.util.Date; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java b/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java index 4838847e400..16ff4d266d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java @@ -5,14 +5,14 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.api.Util; -import java.sql.Timestamp; + import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * @todo Rename this to ApiTokenFragment? The separate page is being taken out diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java index 344032ef5e3..d03ebbc6f7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java @@ -4,16 +4,16 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.Serializable; import java.util.MissingResourceException; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedNativeQueries; -import javax.persistence.NamedNativeQuery; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedNativeQueries; +import jakarta.persistence.NamedNativeQuery; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java index 05f3e209632..8c96f98ce39 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java @@ -14,19 +14,19 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import 
javax.persistence.TypedQuery; -import javax.ws.rs.ClientErrorException; -import javax.ws.rs.InternalServerErrorException; -import javax.ws.rs.ServerErrorException; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; +import jakarta.ws.rs.ClientErrorException; +import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.ServerErrorException; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import org.apache.tika.Tika; diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java index 4f465168580..214e26965fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java +++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java @@ -4,13 +4,13 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.Serializable; import java.util.Collection; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.OneToMany; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.OneToMany; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java index 91b4128c545..0e757998d58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java @@ -10,10 +10,10 @@ import java.util.Date; import java.util.List; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java index dbae9a6dc27..ea2dd1b41fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java +++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java @@ -6,13 +6,13 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java b/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java index 
f6b4e3dc99a..68c8d49ad7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java @@ -6,14 +6,14 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.pidproviders.PidUtil; -import edu.harvard.iq.dataverse.util.StringUtil; + import java.io.IOException; -import java.io.PrintWriter; -import javax.ejb.EJB; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; + +import jakarta.ejb.EJB; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServlet; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java index d51a73fd2dc..bf509c33995 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java @@ -16,10 +16,10 @@ import java.sql.Timestamp; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import java.util.Date; diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java index 5d5d9597746..9542cfe3f71 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java @@ -7,15 +7,15 @@ import java.io.Serializable; import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java index 181d939f4a1..5dcce98a90f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java @@ -17,16 +17,16 @@ import java.util.Objects; import java.util.logging.Logger; import java.util.MissingResourceException; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import 
jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java index 1d530e136ba..eadc13721b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java @@ -5,13 +5,13 @@ */ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java index 0e9501414d0..4255c3b2dbc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java @@ -6,11 +6,11 @@ package edu.harvard.iq.dataverse; import java.util.List; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index 64723fff79a..2cb6f27c3e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; import java.util.List; -import javax.persistence.*; +import jakarta.persistence.*; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java index 32af06014a7..f19ee3c3fc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java @@ -7,8 +7,8 @@ import java.io.Serializable; import java.util.List; -import javax.faces.model.SelectItem; -import javax.persistence.*; +import jakarta.faces.model.SelectItem; +import jakarta.persistence.*; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java index a5329c8b96d..f3a6b83b53f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.*; +import jakarta.persistence.*; import 
org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java b/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java index 713d365ba0f..9dd524127d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java @@ -14,13 +14,13 @@ import java.io.PrintWriter; import java.nio.file.Path; import java.nio.file.Paths; -import javax.servlet.ServletException; -import javax.servlet.annotation.WebServlet; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.servlet.ServletException; +import jakarta.servlet.annotation.WebServlet; +import jakarta.servlet.http.HttpServlet; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import javax.ejb.EJB; +import jakarta.ejb.EJB; import org.apache.commons.io.IOUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java index 7ccd4adb78f..7c75b1a4da6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java @@ -7,14 +7,14 @@ import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Lob; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Lob; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java index b748897dafe..9ecc4a3ecc9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java @@ -18,11 +18,11 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; import edu.harvard.iq.dataverse.settings.JvmSettings; import org.apache.commons.text.StringEscapeUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java index fa0a745d80f..48786b41824 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java @@ -3,7 +3,6 @@ import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; -import java.util.ArrayList; import java.util.Base64; 
import java.util.HashMap; import java.util.List; @@ -11,8 +10,8 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; import edu.harvard.iq.dataverse.settings.JvmSettings; import org.apache.commons.httpclient.HttpException; diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java index d9b0fde15da..86b74b72f30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java @@ -7,7 +7,7 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Stateless; +import jakarta.ejb.Stateless; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java index 99c7951c96e..c37c3f52bc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java @@ -5,23 +5,21 @@ */ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean; import edu.harvard.iq.dataverse.harvest.server.OAISet; import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean; -import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java index 30e03046822..9b4b89db44f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java @@ -14,7 +14,6 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; -import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; @@ -27,7 +26,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -import javax.ejb.EJBException; +import jakarta.ejb.EJBException; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 4e323496188..0f83ae3c5c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -29,10 +29,10 @@ import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.persistence.*; -import javax.validation.constraints.Pattern; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import 
jakarta.persistence.*; +import jakarta.validation.constraints.Pattern; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java index f569a69b13a..f5abe9ac78a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java @@ -10,16 +10,16 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java index 3fa4691a6dd..29dcb22c3ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java @@ -3,8 +3,8 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.ejb.EJB; -import javax.ejb.Stateless; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java index 18531f5203d..701e826f12e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java @@ -1,13 +1,13 @@ package edu.harvard.iq.dataverse; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; @FacesConverter("dataFileConverter") public class DataFileConverter implements Converter { diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index c30bfce368a..98ee3351458 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -21,16 +21,16 @@ import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Named; -import 
javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index 275d47cf1de..f4f66d3c874 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -11,15 +11,15 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTable.java b/src/main/java/edu/harvard/iq/dataverse/DataTable.java index 614e7394583..a17d8c65138 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataTable.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataTable.java @@ -7,26 +7,23 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import java.util.ArrayList; import java.util.List; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.validation.constraints.Size; -import javax.persistence.OrderBy; -import org.hibernate.validator.constraints.NotBlank; -import org.hibernate.validator.constraints.URL; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.validation.constraints.Size; +import jakarta.persistence.OrderBy; import edu.harvard.iq.dataverse.datavariable.DataVariable; import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.Index; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Index; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java b/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java index 2f987dde82b..713c86190fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java @@ -5,11 +5,11 @@ import java.io.Serializable; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.enterprise.context.SessionScoped; -import javax.faces.context.FacesContext; -import javax.inject.Named; -import javax.json.JsonObject; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.SessionScoped; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Named; +import jakarta.json.JsonObject; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java b/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java index 5cf9c623bde..eeda70c1f17 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse; -import javax.ejb.Stateless; -import javax.json.JsonObject; +import jakarta.ejb.Stateless; +import jakarta.json.JsonObject; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index f9c839a0fff..620e66c6c54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -17,22 +17,22 @@ import java.util.List; import java.util.Objects; import java.util.Set; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.NamedStoredProcedureQuery; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.OrderBy; -import javax.persistence.ParameterMode; -import javax.persistence.StoredProcedureParameter; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.NamedStoredProcedureQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.OrderBy; +import jakarta.persistence.ParameterMode; +import jakarta.persistence.StoredProcedureParameter; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java b/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java index 2d19cf5fe06..b779e084250 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java @@ -6,12 +6,12 @@ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import 
jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; @FacesConverter("datasetConverter") public class DatasetConverter implements Converter { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java b/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java index 00936b9365a..3252b7f0367 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java @@ -7,7 +7,7 @@ package edu.harvard.iq.dataverse; import java.util.Comparator; -import javax.persistence.Version; +import jakarta.persistence.Version; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index 31d08f84c02..c836a20893f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -19,20 +19,20 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Transient; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; import org.apache.commons.lang3.StringUtils; @Entity diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java index 5d83f1e4f8d..c679cd7edad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java @@ -14,17 +14,17 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Transient; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.ImmutablePair; diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java index e57a2a1538d..1621b80df55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java @@ -6,8 +6,8 @@ package edu.harvard.iq.dataverse; -import javax.enterprise.context.Dependent; -import javax.inject.Named; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java index bad482dbca9..7746099818e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java @@ -8,18 +8,18 @@ import java.io.Serializable; import java.util.Collection; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.OrderBy; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 89f8c11d076..620d4bf3e09 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -17,24 +17,24 @@ import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonException; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonReader; -import javax.json.JsonString; -import javax.json.JsonValue; -import javax.json.JsonValue.ValueType; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonException; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonReader; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.JsonValue.ValueType; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; import 
org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.httpclient.HttpException; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java index df126514308..824b486a42d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java @@ -13,8 +13,8 @@ import java.util.Set; import java.util.TreeMap; import java.util.MissingResourceException; -import javax.faces.model.SelectItem; -import javax.persistence.*; +import jakarta.faces.model.SelectItem; +import jakarta.persistence.*; /** * Defines the meaning and constraints of a metadata field and its values. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java index 3ded24d7a59..6d3fda2812d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java @@ -5,11 +5,11 @@ */ package edu.harvard.iq.dataverse; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.util.Collections; + import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java index 2447a6478fd..1064187ccd6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java @@ -10,17 +10,17 @@ import edu.harvard.iq.dataverse.util.MarkupChecker; import java.io.Serializable; import java.util.Comparator; -import java.util.ResourceBundle; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import javax.persistence.Transient; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java index 132955859ff..b6c21014f04 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java @@ -13,14 +13,13 @@ import java.util.GregorianCalendar; import java.util.logging.Logger; import java.util.regex.Pattern; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.validation.EMailValidator; import 
edu.harvard.iq.dataverse.validation.URLValidator; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.validator.routines.UrlValidator; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java index 8f8e9b103c1..dec07a09643 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java @@ -2,19 +2,19 @@ import java.io.Serializable; import java.util.Date; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java index 3789efcd443..39c82bfa3f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java @@ -8,12 +8,13 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.TypedQuery; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; /** * @@ -63,7 +64,7 @@ public DatasetLinkingDataverse findDatasetLinkingDataverse(Long datasetId, Long .setParameter("datasetId", datasetId) .setParameter("linkingDataverseId", linkingDataverseId) .getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { logger.fine("no datasetLinkingDataverse found for datasetId " + datasetId + " and linkingDataverseId " + linkingDataverseId); return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java index 7b857545c20..cc0078ecbc5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java @@ -20,25 +20,24 @@ package edu.harvard.iq.dataverse; -import static edu.harvard.iq.dataverse.DatasetLock.Reason.Workflow; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import java.util.Date; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import 
javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 35d235146f4..d20175b6e1a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -58,7 +58,6 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.WebloaderUtil; import edu.harvard.iq.dataverse.validation.URLValidator; import edu.harvard.iq.dataverse.workflows.WorkflowComment; @@ -84,27 +83,27 @@ import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.event.ValueChangeEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.event.ValueChangeEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; import org.primefaces.event.FileUploadEvent; import org.primefaces.model.file.UploadedFile; -import javax.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolation; import org.apache.commons.httpclient.HttpClient; //import org.primefaces.context.RequestContext; import java.util.Arrays; import java.util.HashSet; -import javax.faces.model.SelectItem; -import javax.faces.validator.ValidatorException; +import jakarta.faces.model.SelectItem; +import jakarta.faces.validator.ValidatorException; import java.util.logging.Level; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; @@ -125,12 +124,12 @@ import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; import java.util.Collections; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; -import javax.faces.event.AjaxBehaviorEvent; 
-import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; +import jakarta.faces.event.AjaxBehaviorEvent; +import jakarta.servlet.ServletOutputStream; +import jakarta.servlet.http.HttpServletResponse; import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.lang3.mutable.MutableBoolean; @@ -389,6 +388,8 @@ public void setShowIngestSuccess(boolean showIngestSuccess) { Map> previewToolsByFileId = new HashMap<>(); // TODO: Consider renaming "previewTools" to "filePreviewTools". List previewTools = new ArrayList<>(); + Map> fileQueryToolsByFileId = new HashMap<>(); + List fileQueryTools = new ArrayList<>(); private List datasetExploreTools; public Boolean isHasRsyncScript() { @@ -2150,6 +2151,7 @@ private String init(boolean initFull) { configureTools = externalToolService.findFileToolsByType(ExternalTool.Type.CONFIGURE); exploreTools = externalToolService.findFileToolsByType(ExternalTool.Type.EXPLORE); previewTools = externalToolService.findFileToolsByType(ExternalTool.Type.PREVIEW); + fileQueryTools = externalToolService.findFileToolsByType(ExternalTool.Type.QUERY); datasetExploreTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.EXPLORE); rowsPerPage = 10; if (dataset.getId() != null && canUpdateDataset()) { @@ -5506,12 +5508,25 @@ public boolean isShowPreviewButton(Long fileId) { List previewTools = getPreviewToolsForDataFile(fileId); return previewTools.size() > 0; } + + public boolean isShowQueryButton(Long fileId) { + DataFile dataFile = datafileService.find(fileId); + + if(dataFile.isRestricted() || !dataFile.isReleased() || FileUtil.isActivelyEmbargoed(dataFile)){ + return false; + } + + List fileQueryTools = getQueryToolsForDataFile(fileId); + return fileQueryTools.size() > 0; + } public List getPreviewToolsForDataFile(Long fileId) { return getCachedToolsForDataFile(fileId, ExternalTool.Type.PREVIEW); } - + public List getQueryToolsForDataFile(Long fileId) { + return getCachedToolsForDataFile(fileId, ExternalTool.Type.QUERY); + } public List getConfigureToolsForDataFile(Long fileId) { return getCachedToolsForDataFile(fileId, ExternalTool.Type.CONFIGURE); } @@ -5536,6 +5551,10 @@ public List getCachedToolsForDataFile(Long fileId, ExternalTool.Ty cachedToolsByFileId = previewToolsByFileId; externalTools = previewTools; break; + case QUERY: + cachedToolsByFileId = fileQueryToolsByFileId; + externalTools = fileQueryTools; + break; default: break; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java b/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java index f432e4f5bbf..53ea62f566a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java @@ -6,14 +6,14 @@ package edu.harvard.iq.dataverse; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Version; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Version; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index c93236f347b..52eb5868c35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -31,22 +31,21 @@ import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.LockModeType; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.StoredProcedureQuery; -import javax.persistence.TypedQuery; -import org.apache.commons.lang3.RandomStringUtils; -import org.ocpsoft.common.util.Strings; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.LockModeType; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.StoredProcedureQuery; +import jakarta.persistence.TypedQuery; +import org.apache.commons.lang3.StringUtils; /** * @@ -119,7 +118,7 @@ public Dataset find(Object pk) { public Dataset findDeep(Object pk) { return (Dataset) em.createNamedQuery("Dataset.findById") .setParameter("id", pk) - // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files + // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest") .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset") .setHint("eclipselink.left-join-fetch", "o.files.dataTables") @@ -331,7 +330,7 @@ public Dataset findByGlobalId(String globalId) { * in the dataset components, a ConstraintViolationException will be thrown, * which can be further parsed to detect the specific offending values. 
* @param id the id of the dataset - * @throws javax.validation.ConstraintViolationException + * @throws ConstraintViolationException */ @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) @@ -399,7 +398,7 @@ public DatasetVersionUser getDatasetVersionUser(DatasetVersion version, User use query.setParameter("userId", au.getId()); try { return query.getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { return null; } } @@ -514,7 +513,7 @@ public List listLocks(DatasetLock.Reason lockType, AuthenticatedUse } try { return query.getResultList(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { return null; } } @@ -595,7 +594,7 @@ public Map getArchiveDescriptionsForHarvestedDatasets(Set da return null; } - String datasetIdStr = Strings.join(datasetIds, ", "); + String datasetIdStr = StringUtils.join(datasetIds, ", "); String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")"; List searchResults; @@ -771,11 +770,11 @@ public void reExportDatasetAsync(Dataset dataset) { public void exportDataset(Dataset dataset, boolean forceReExport) { if (dataset != null) { - // Note that the logic for handling a dataset is similar to what is implemented in exportAllDatasets, + // Note that the logic for handling a dataset is similar to what is implemented in exportAllDatasets, // but when only one dataset is exported we do not log in a separate export logging file if (dataset.isReleased() && dataset.getReleasedVersion() != null && !dataset.isDeaccessioned()) { - // can't trust dataset.getPublicationDate(), no. + // can't trust dataset.getPublicationDate(), no. Date publicationDate = dataset.getReleasedVersion().getReleaseTime(); // we know this dataset has a non-null released version! Maybe not - SEK 8/19 (We do now! 
:) if (forceReExport || (publicationDate != null && (dataset.getLastExportTime() == null diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java b/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java index f253e1810a1..91a4ff3cf5a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java @@ -6,8 +6,8 @@ package edu.harvard.iq.dataverse; -import javax.persistence.Column; -import javax.persistence.Version; +import jakarta.persistence.Column; +import jakarta.persistence.Version; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 9d5c27ae9fd..93f45bd288e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -25,37 +25,37 @@ import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import javax.persistence.Transient; -import javax.persistence.UniqueConstraint; -import javax.persistence.Version; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.constraints.Size; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import jakarta.persistence.Transient; +import jakarta.persistence.UniqueConstraint; +import jakarta.persistence.Version; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.Validation; +import jakarta.validation.Validator; +import jakarta.validation.constraints.Size; import org.apache.commons.lang3.StringUtils; /** @@ -1814,7 +1814,7 @@ public String getPublicationDateAsString() { // So something will need to be modified to accommodate this. -- L.A. 
/** * We call the export format "Schema.org JSON-LD" and extensive Javadoc can - * be found in {@link SchemaDotOrgExporter}. + * be found in {@link edu.harvard.iq.dataverse.export.SchemaDotOrgExporter}. */ public String getJsonLd() { // We show published datasets only for "datePublished" field below. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java index 98f0d707bdc..b670fb18afc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java @@ -5,12 +5,12 @@ */ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java index c086fed3b10..a5ea487a68f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java @@ -6,8 +6,8 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 607c46d3662..28243c37eee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -13,7 +13,7 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.MarkupChecker; import edu.harvard.iq.dataverse.util.SystemConfig; -import java.io.IOException; + import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; @@ -22,22 +22,21 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; -import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonObjectBuilder; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; import org.apache.commons.lang3.StringUtils; -import org.apache.solr.client.solrj.SolrServerException; - + /** * * @author skraffmiller @@ -196,7 +195,7 
@@ public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendl query.setParameter("majorVersionNumber", majorVersionNumber); query.setParameter("minorVersionNumber", minorVersionNumber); foundDatasetVersion = (DatasetVersion) query.getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { logger.warning("no ds version found: " + datasetId + " " + friendlyVersionNumber); // DO nothing, just return null. } @@ -224,7 +223,7 @@ public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendl } } return retVal; - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { logger.warning("no ds version found: " + datasetId + " " + friendlyVersionNumber); // DO nothing, just return null. } @@ -451,7 +450,7 @@ private DatasetVersion getDatasetVersionByQuery(String queryString){ msg("Found: " + ds); return ds; - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { msg("DatasetVersion not found: " + queryString); logger.log(Level.FINE, "DatasetVersion not found: {0}", queryString); return null; @@ -1217,7 +1216,7 @@ public List<DatasetVersion> getUnarchivedDatasetVersions(){ try { List<DatasetVersion> dsl = em.createNamedQuery("DatasetVersion.findUnarchivedReleasedVersion", DatasetVersion.class).getResultList(); return dsl; - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}"); return null; } catch (EJBException e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java index 6e9f9c17f7a..55b98c178bb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java @@ -6,6 +6,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.util.MarkupChecker; + import java.io.Serializable; import java.util.ArrayList; import java.util.Calendar; @@ -14,12 +15,12 @@ import java.util.Date; import java.util.List; import java.util.TreeMap; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.json.JsonObject; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; + +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java index eda62a080f8..e56fad71253 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java @@ -4,20 +4,20 @@ import edu.harvard.iq.dataverse.authorization.users.User; import java.io.Serializable; import java.sql.Timestamp; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; +import jakarta.persistence.Id; +import jakarta.persistence.Index; -import javax.persistence.JoinColumn; +import jakarta.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import 
javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * Records the last time a {@link User} handled a {@link DatasetVersion}. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java index 9c47a58811a..1dd42903118 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java @@ -14,10 +14,10 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.primefaces.event.FileUploadEvent; import org.primefaces.model.file.UploadedFile; diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 50d5ae09548..682c1dc6744 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -13,29 +12,28 @@ import java.util.List; import java.util.Objects; import java.util.Set; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.FetchType; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Transient; -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Pattern; -import javax.validation.constraints.Size; - -import org.apache.commons.lang3.StringUtils; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.FetchType; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Pattern; +import jakarta.validation.constraints.Size; + import 
org.hibernate.validator.constraints.NotBlank; import org.hibernate.validator.constraints.NotEmpty; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java index 46021ddbc9b..d77767985eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java @@ -7,15 +7,15 @@ import java.io.Serializable; import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; import edu.harvard.iq.dataverse.validation.ValidateEmail; import org.hibernate.validator.constraints.NotBlank; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java index 7d09c300dde..d802117043b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java @@ -6,12 +6,12 @@ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java index bfd465b8f54..768c2308e50 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java @@ -8,16 +8,16 @@ import java.io.Serializable; import java.util.Objects; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 67bf6a820e2..5c77989f6d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -2,11 +2,11 @@ import edu.harvard.iq.dataverse.util.LruCache; import java.util.List; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java index 662ee74c3bf..39ad6ca9520 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java @@ -2,16 +2,16 @@ import java.io.Serializable; import java.util.Objects; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java index 92b1ff7c2cf..c4749be0cb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java @@ -6,17 +6,17 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; -import javax.persistence.UniqueConstraint; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; +import jakarta.persistence.UniqueConstraint; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java index 42a1290fdbd..66c700f59ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java @@ -7,13 +7,13 @@ import edu.harvard.iq.dataverse.util.LruCache; import java.util.List; -import java.util.logging.Logger; -import javax.ejb.Stateless; -import 
javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; + +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java b/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java index 1e1353a11fc..389b85c19d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java @@ -19,11 +19,11 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java index 788308dce1e..3030922ea5e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java @@ -7,18 +7,18 @@ import java.io.Serializable; import java.util.Date; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java index c823deddb64..834ff96e392 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java @@ -8,13 +8,13 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; /** * @@ -66,7 +66,7 @@ public 
DataverseLinkingDataverse findDataverseLinkingDataverse(Long dataverseId, .setParameter("dataverseId", dataverseId) .setParameter("linkingDataverseId", linkingDataverseId) .getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (jakarta.persistence.NoResultException e) { logger.fine("No DataverseLinkingDataverse found for dataverseId " + dataverseId + " and linkedDataverseId " + linkingDataverseId); return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java index a2659b81974..c93144b2e97 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java @@ -1,13 +1,13 @@ package edu.harvard.iq.dataverse; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; import java.io.Serializable; import java.util.Objects; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index b48ff725e1e..daf33f444d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -28,27 +28,26 @@ import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.List; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; import org.primefaces.model.DualListModel; -import javax.ejb.EJBException; -import javax.faces.event.ValueChangeEvent; -import javax.faces.model.SelectItem; +import jakarta.ejb.EJBException; +import jakarta.faces.event.ValueChangeEvent; +import jakarta.faces.model.SelectItem; import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.primefaces.PrimeFaces; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java index e193b535412..58a3837dbf9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java @@ -1,11 +1,11 
@@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import javax.annotation.PostConstruct; -import javax.enterprise.context.RequestScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.annotation.PostConstruct; +import jakarta.enterprise.context.RequestScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; /** * The service bean to go to when one needs the current {@link DataverseRequest}. diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index b83593f5b6e..78d5eaf3414 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -17,13 +17,13 @@ import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; -//import javax.validation.constraints.NotNull; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; +//import jakarta.validation.constraints.NotNull; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index e092f209acd..7194a1ef31e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -30,18 +30,18 @@ import java.util.Map; import java.util.logging.Logger; import java.util.Properties; -import java.util.concurrent.Future; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Inject; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; + +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; import org.apache.solr.client.solrj.SolrServerException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java index c6016939c08..e8d76e1825e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java @@ -18,13 +18,13 @@ import java.util.List; import java.util.Locale; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.enterprise.context.SessionScoped; -import javax.faces.context.FacesContext; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; -import 
javax.servlet.http.HttpSession; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.SessionScoped; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpSession; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java index 0c6341db485..539669328a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java @@ -8,16 +8,16 @@ import java.io.Serializable; import java.util.Objects; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.OneToOne; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java b/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java index ad48f15fc54..a2dc785c470 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java +++ b/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java @@ -8,13 +8,13 @@ import java.io.Serializable; import java.util.List; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.OneToMany; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.OneToMany; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index e3013b8cf51..9e7f3f3fe96 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -5,14 +5,13 @@ import java.sql.Timestamp; import java.text.SimpleDateFormat; -import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.logging.Logger; -import javax.persistence.*; +import jakarta.persistence.*; /** * Base of the object hierarchy for "anything that can be inside a dataverse". 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index 6ff01ef3ea8..a322a25103e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -2,8 +2,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.util.SystemConfig; -import java.util.Locale; -import javax.persistence.MappedSuperclass; +import jakarta.persistence.MappedSuperclass; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java index c9127af7c2b..d4219c36149 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java @@ -12,19 +12,18 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.StoredProcedureQuery; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.StoredProcedureQuery; import org.apache.commons.lang3.StringUtils; -import org.ocpsoft.common.util.Strings; /** * Your goto bean for everything {@link DvObject}, that's not tied to any @@ -134,7 +133,7 @@ private DvObject runFindByGlobalId(Query query, GlobalId gid, DvObject.DType dty query.setParameter("authority", gid.getAuthority()); query.setParameter("dtype", dtype.getDType()); foundDvObject = (DvObject) query.getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { // (set to .info, this can fill the log file with thousands of // these messages during a large harvest run) logger.fine("no dvObject found: " + gid.asString()); @@ -155,7 +154,7 @@ private Long runFindIdByGlobalId(Query query, GlobalId gid, DvObject.DType dtype query.setParameter("authority", gid.getAuthority()); query.setParameter("dtype", dtype.getDType()); foundDvObject = (Long) query.getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { // (set to .info, this can fill the log file with thousands of // these messages during a large harvest run) logger.fine("no dvObject found: " + gid.asString()); @@ -323,7 +322,7 @@ public Map<Long, String> getObjectPathsByIds(Set<Long> objectIds){ return null; } - String datasetIdStr = Strings.join(objectIds, ", "); + String datasetIdStr = StringUtils.join(objectIds, ", "); String qstr = "WITH RECURSIVE path_elements AS ((" + " SELECT id, owner_id FROM dvobject WHERE id in (" + datasetIdStr + "))" + diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java b/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java index 
1bf6bee82eb..883baeedef4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java @@ -4,8 +4,8 @@ import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; import org.apache.commons.text.StringEscapeUtils; -import javax.ejb.Stateless; -import javax.inject.Inject; +import jakarta.ejb.Stateless; +import jakarta.inject.Inject; import java.util.Arrays; import java.util.List; import java.util.Optional; diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 1c033b37872..02a148f8cc5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -39,7 +39,6 @@ import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.WebloaderUtil; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; @@ -58,23 +57,23 @@ import java.util.Map; import java.util.Optional; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; import org.primefaces.event.FileUploadEvent; import org.primefaces.model.file.UploadedFile; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonArray; -import javax.json.JsonReader; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonArray; +import jakarta.json.JsonReader; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.io.IOUtils; import org.apache.commons.httpclient.methods.GetMethod; @@ -82,10 +81,10 @@ import java.util.Collection; import java.util.Set; import java.util.logging.Level; -import javax.faces.event.AjaxBehaviorEvent; -import javax.faces.event.FacesEvent; -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; +import jakarta.faces.event.AjaxBehaviorEvent; +import jakarta.faces.event.FacesEvent; +import jakarta.servlet.ServletOutputStream; +import jakarta.servlet.http.HttpServletResponse; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.mutable.MutableBoolean; import org.primefaces.PrimeFaces; diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 7185887ecc3..bad8903c091 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -24,11 +24,10 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SearchServiceBean; import java.util.Map; -import 
java.util.Map.Entry; import java.util.Set; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -41,16 +40,16 @@ import java.util.Stack; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.Resource; -import javax.ejb.EJBContext; -import javax.ejb.EJBException; -import javax.ejb.TransactionAttribute; -import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; -import static javax.ejb.TransactionAttributeType.SUPPORTS; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; +import jakarta.annotation.Resource; +import jakarta.ejb.EJBContext; +import jakarta.ejb.EJBException; +import jakarta.ejb.TransactionAttribute; +import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW; +import static jakarta.ejb.TransactionAttributeType.SUPPORTS; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; /** * An EJB capable of executing {@link Command}s in a JEE environment. diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java index d8339dce856..891fe91dc66 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java @@ -4,13 +4,13 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import javax.annotation.Resource; -import javax.ejb.EJBContext; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import static javax.ejb.TransactionAttributeType.REQUIRED; +import jakarta.annotation.Resource; +import jakarta.ejb.EJBContext; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import static jakarta.ejb.TransactionAttributeType.REQUIRED; -import javax.inject.Named; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/Embargo.java b/src/main/java/edu/harvard/iq/dataverse/Embargo.java index eac83edd296..29959b9f2d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Embargo.java +++ b/src/main/java/edu/harvard/iq/dataverse/Embargo.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse; -import javax.persistence.*; +import jakarta.persistence.*; import edu.harvard.iq.dataverse.util.BundleUtil; diff --git a/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java index afbeab404c7..d0a8d214959 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java @@ -3,12 +3,12 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import 
javax.persistence.PersistenceContext; -import javax.persistence.Query; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; import java.util.List; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java index 3618da79630..7ebfa0302ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java @@ -9,13 +9,13 @@ import java.io.Serializable; import java.sql.Timestamp; import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java b/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java index 75ef62200bf..fd41315dbc0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java @@ -5,13 +5,13 @@ */ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java index e7362587c36..d4d701cb02f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java @@ -9,11 +9,11 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java index 76c5df4409a..6f68815c2ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java @@ -2,16 +2,16 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import javax.persistence.Column; -import javax.persistence.Embeddable; -import javax.persistence.EmbeddedId; -import javax.persistence.Entity; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import 
javax.persistence.MapsId; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Column; +import jakarta.persistence.Embeddable; +import jakarta.persistence.EmbeddedId; +import jakarta.persistence.Entity; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.MapsId; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; import java.io.Serializable; import java.util.Date; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java b/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java index e0c2b83ab65..84c033afcaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java @@ -7,8 +7,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java b/src/main/java/edu/harvard/iq/dataverse/FileDownload.java index fad03d2a0a1..a79281f71f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownload.java @@ -6,18 +6,18 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import javax.persistence.Transient; -import javax.persistence.CascadeType; -import javax.persistence.OneToOne; -import javax.persistence.MapsId; -import javax.persistence.FetchType; -import javax.persistence.JoinColumn; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import jakarta.persistence.Transient; +import jakarta.persistence.CascadeType; +import jakarta.persistence.OneToOne; +import jakarta.persistence.MapsId; +import jakarta.persistence.FetchType; +import jakarta.persistence.JoinColumn; import java.util.Date; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 850efc2f1ae..c4b4978e0f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -18,10 +18,10 @@ import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.primefaces.PrimeFaces; //import org.primefaces.context.RequestContext; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a90489be29a..e2b07717358 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -18,7 +18,6 @@ import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import java.io.IOException; @@ -29,16 +28,16 @@ import java.util.List; import java.util.UUID; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.faces.context.FacesContext; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.servlet.ServletOutputStream; +import jakarta.servlet.http.HttpServletResponse; import org.primefaces.PrimeFaces; //import org.primefaces.context.RequestContext; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java index 01131bdca01..461c8b14e46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java @@ -22,24 +22,24 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.CascadeType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Transient; -import javax.persistence.Version; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; +import jakarta.persistence.Version; import edu.harvard.iq.dataverse.datavariable.CategoryMetadata; import edu.harvard.iq.dataverse.datavariable.DataVariable; @@ -49,12 +49,12 @@ import edu.harvard.iq.dataverse.util.StringUtil; import java.util.HashSet; import java.util.Set; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; +import jakarta.validation.ConstraintViolation; +import 
jakarta.validation.Validation; +import jakarta.validation.Validator; +import jakarta.validation.ValidatorFactory; import org.hibernate.validator.constraints.NotBlank; -import javax.validation.constraints.Pattern; +import jakarta.validation.constraints.Pattern; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index e460fc86aa4..49c904c3ac3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -30,8 +30,6 @@ import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; -import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil; -import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -39,9 +37,8 @@ import edu.harvard.iq.dataverse.util.JsfHelper; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.io.IOException; -import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -49,19 +46,19 @@ import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.validator.ValidatorException; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonValue; -import javax.validation.ConstraintViolation; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.validator.ValidatorException; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonValue; +import jakarta.validation.ConstraintViolation; import org.primefaces.PrimeFaces; import org.primefaces.component.tabview.TabView; @@ -80,6 +77,7 @@ public class FilePage implements java.io.Serializable { private FileMetadata fileMetadata; private Long fileId; private String version; + private String toolType; private DataFile file; private GuestbookResponse guestbookResponse; private int selectedTabIndex; @@ -91,6 +89,7 @@ public class FilePage implements java.io.Serializable { private List<ExternalTool> configureTools; private List<ExternalTool> exploreTools; private List<ExternalTool> toolsWithPreviews; + private List<ExternalTool> queryTools; private Long datasetVersionId; /** * Have the terms been met so that the Preview tab can show the preview? 
@@ -152,7 +151,6 @@ public String init() { if (fileId != null || persistentId != null) { - // --------------------------------------- // Set the file and datasetVersion // --------------------------------------- @@ -242,13 +240,28 @@ public String init() { } configureTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.CONFIGURE, contentType); exploreTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.EXPLORE, contentType); + queryTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.QUERY, contentType); Collections.sort(exploreTools, CompareExternalToolName); toolsWithPreviews = sortExternalTools(); - if(!toolsWithPreviews.isEmpty()){ - setSelectedTool(toolsWithPreviews.get(0)); + + if (toolType != null) { + if (toolType.equals("PREVIEW")) { + if (!toolsWithPreviews.isEmpty()) { + setSelectedTool(toolsWithPreviews.get(0)); + } + } + if (toolType.equals("QUERY")) { + if (!queryTools.isEmpty()) { + setSelectedTool(queryTools.get(0)); + } + } + } else { + if (!getAllAvailableTools().isEmpty()){ + setSelectedTool(getAllAvailableTools().get(0)); + } } - } else { + } else { return permissionsWrapper.notFound(); } @@ -992,6 +1005,30 @@ public List<ExternalTool> getToolsWithPreviews() { return toolsWithPreviews; } + public List<ExternalTool> getQueryTools() { + return queryTools; + } + + + public List<ExternalTool> getAllAvailableTools(){ + List<ExternalTool> externalTools = new ArrayList<>(); + externalTools.addAll(queryTools); + for (ExternalTool pt : toolsWithPreviews){ + if (!externalTools.contains(pt)){ + externalTools.add(pt); + } + } + return externalTools; + } + + public String getToolType() { + return toolType; + } + + public void setToolType(String toolType) { + this.toolType = toolType; + } + private ExternalTool selectedTool; public ExternalTool getSelectedTool() { @@ -1184,7 +1221,22 @@ public String getEmbargoPhrase() { return BundleUtil.getStringFromBundle("embargoed.willbeuntil"); } } - + + public String getToolTabTitle(){ + if (getAllAvailableTools().size() > 1) { + return BundleUtil.getStringFromBundle("file.toolTab.header"); + } + if( getSelectedTool() != null ){ + if(getSelectedTool().isPreviewTool()){ + return BundleUtil.getStringFromBundle("file.previewTab.header"); + } + if(getSelectedTool().isQueryTool()){ + return BundleUtil.getStringFromBundle("file.queryTab.header"); + } + } + return BundleUtil.getStringFromBundle("file.toolTab.header"); + } + public String getIngestMessage() { return BundleUtil.getStringFromBundle("file.ingestFailed.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())); } @@ -1194,4 +1246,13 @@ public boolean isHasPublicStore() { return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(DataAccess.getStorageDriverFromIdentifier(file.getStorageIdentifier()))); } + /** + * This method only exists because in file-edit-button-fragment.xhtml we + * call bean.editFileMetadata() and we need both FilePage (this bean) and + * DatasetPage to have the method defined to prevent errors in server.log. 
+ */ + public String editFileMetadata(){ + return ""; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java b/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java index 5e73ef65f25..ce3b0d65875 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java @@ -6,8 +6,8 @@ package edu.harvard.iq.dataverse; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java index 40d219d2638..db83ab953a1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java +++ b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java @@ -3,13 +3,8 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.*; +import jakarta.persistence.*; import java.util.Collection; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java index 0fac75257c8..eb7b97b1a84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java +++ b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java @@ -7,18 +7,18 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; import java.util.List; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Index; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/Guestbook.java b/src/main/java/edu/harvard/iq/dataverse/Guestbook.java index 18913bfd5bf..2ef23d1f925 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Guestbook.java +++ b/src/main/java/edu/harvard/iq/dataverse/Guestbook.java @@ -3,27 +3,25 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.Serializable; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.OneToMany; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import 
jakarta.persistence.OneToMany; import java.util.List; import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.ManyToOne; -import javax.persistence.OrderBy; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import javax.persistence.Transient; +import jakarta.persistence.Column; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import jakarta.persistence.Transient; import edu.harvard.iq.dataverse.util.DateUtil; -import org.apache.commons.text.StringEscapeUtils; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index 7cbb69e5c1d..9fb584a9133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -17,13 +17,13 @@ import java.util.Iterator; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 69404482fce..0057fbeddab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -13,8 +13,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import javax.persistence.*; -import javax.validation.constraints.Size; +import jakarta.persistence.*; +import jakarta.validation.constraints.Size; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 2f795a4da74..bd598d2dca0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -21,17 +21,17 @@ import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.faces.model.SelectItem; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.StoredProcedureQuery; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.faces.model.SelectItem; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.StoredProcedureQuery; +import jakarta.persistence.TypedQuery; import org.apache.commons.text.StringEscapeUtils; /** * diff --git 
a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java index 23aac4a24a3..c53df93def8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java @@ -6,20 +6,19 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; -import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.ServletOutputStream; +import jakarta.servlet.http.HttpServletResponse; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java index 5394ddc652a..fcd4e91d455 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java @@ -5,12 +5,11 @@ */ package edu.harvard.iq.dataverse; -import java.util.List; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java index d2149a3072a..4942db9e7ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java @@ -29,8 +29,8 @@ import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; import java.security.PrivateKey; /* Handlenet imports: */ diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java index 5be7578f7f8..f008db1403f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java @@ -27,17 +27,17 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.model.SelectItem; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import 
jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.model.SelectItem; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java index 28df6e19e65..6709b978c47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java @@ -6,16 +6,16 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.OneToOne; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java index 432683a5797..6dbba34920b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java @@ -6,11 +6,6 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand; -import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; -import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean; import edu.harvard.iq.dataverse.harvest.server.OAIRecord; import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean; import edu.harvard.iq.dataverse.harvest.server.OAISet; @@ -26,15 +21,15 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java 
b/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java index ef9b3267db4..e1864194436 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java @@ -7,12 +7,12 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.io.IOException; -import javax.ejb.EJB; -import javax.servlet.RequestDispatcher; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.ejb.EJB; +import jakarta.servlet.RequestDispatcher; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServlet; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java b/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java index 2ecfc55f67e..7d540f0a425 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java @@ -5,13 +5,13 @@ */ package edu.harvard.iq.dataverse; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.validator.FacesValidator; -import javax.faces.validator.Validator; -import javax.faces.validator.ValidatorException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.validator.FacesValidator; +import jakarta.faces.validator.Validator; +import jakarta.faces.validator.ValidatorException; import edu.harvard.iq.dataverse.util.BundleUtil; @FacesValidator(value = "linkValidator") diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index 2420ce08550..16d2cc53cb9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -9,30 +9,27 @@ import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; -import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.JsfHelper; -import edu.harvard.iq.dataverse.util.SessionUtil; -import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.event.AjaxBehaviorEvent; -import javax.faces.validator.ValidatorException; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import 
jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.AjaxBehaviorEvent; +import jakarta.faces.validator.ValidatorException; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index bc7b34ee8b7..f17732df7b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -5,7 +5,6 @@ */ package edu.harvard.iq.dataverse; -import com.sun.mail.smtp.SMTPSendFailedException; import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -24,25 +23,23 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.Properties; -import java.util.Map; -import java.util.HashMap; import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.annotation.Resource; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.mail.Address; -import javax.mail.Message; -import javax.mail.MessagingException; -import javax.mail.Session; -import javax.mail.Transport; -import javax.mail.internet.AddressException; -import javax.mail.internet.InternetAddress; -import javax.mail.internet.MimeMessage; +import jakarta.annotation.Resource; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.mail.Address; +import jakarta.mail.Message; +import jakarta.mail.MessagingException; +import jakarta.mail.Session; +import jakarta.mail.Transport; +import jakarta.mail.internet.AddressException; +import jakarta.mail.internet.InternetAddress; +import jakarta.mail.internet.MimeMessage; import edu.harvard.iq.dataverse.validation.EMailValidator; +import jakarta.json.JsonObject; import org.apache.commons.lang3.StringUtils; /** @@ -124,9 +121,9 @@ public boolean sendSystemEmail(String to, String subject, String messageText, bo try { Transport.send(msg, recipients); sent = true; - } catch (SMTPSendFailedException ssfe) { + } catch (MessagingException ssfe) { logger.warning("Failed to send mail to: " + to); - logger.warning("SMTPSendFailedException Message: " + ssfe); + logger.warning("MessagingException Message: " + ssfe); } } else { logger.fine("Skipping sending mail to " + to + ", because the \"no-reply\" address not set (" + Key.SystemEmail + " setting)."); @@ -616,7 +613,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio case DATASETMENTIONED: String additionalInfo = userNotification.getAdditionalInfo(); dataset = (Dataset) targetObject; - javax.json.JsonObject citingResource = null; + JsonObject citingResource = null; citingResource = JsonUtil.getJsonObject(additionalInfo); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index fd309790026..1b4af29c915 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -12,7 +12,6 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import 
edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo; -import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -28,14 +27,14 @@ import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.commons.lang3.ObjectUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java index 8513ca33b47..583e195ab0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java @@ -22,17 +22,17 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.commons.lang3.StringUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java index 7db0ecc0767..cc89cfd9d56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java @@ -11,17 +11,19 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.AbortProcessingException; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.event.AjaxBehaviorEvent; +import 
jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.servlet.ServletOutputStream; +import jakarta.servlet.http.HttpServletResponse; /** * @@ -325,7 +327,7 @@ public void setDisplayDownloadAll(boolean displayDownloadAll) { this.displayDownloadAll = displayDownloadAll; } - public String updateGuestbooksRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException { + public String updateGuestbooksRoot(AjaxBehaviorEvent event) throws AbortProcessingException { try { dataverse = engineService.submit( new UpdateDataverseGuestbookRootCommand(!isInheritGuestbooksValue(), diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index e71e04bc42f..bf78b9d088f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -29,18 +29,17 @@ import java.util.Date; import java.util.LinkedList; import java.util.List; -import java.util.ResourceBundle; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.commons.text.StringEscapeUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java index 37ee7948a14..98369a2eab3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java @@ -14,15 +14,17 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.faces.event.AbortProcessingException; +import jakarta.faces.event.AjaxBehaviorEvent; /** * * @author skraffmiller @@ -237,7 +239,7 @@ public void viewSelectedTemplate(Template selectedTemplate) { tempPage.setTemplate(selectedTemplate); } - public String updateTemplatesRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException { + public String updateTemplatesRoot(AjaxBehaviorEvent event) throws AbortProcessingException { try { if (dataverse.getOwner() != null) { if 
(isInheritTemplatesValue() && dataverse.getDefaultTemplate() == null && dataverse.getOwner().getDefaultTemplate() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java index 33e75efffb5..0fd7c2efbc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java @@ -8,21 +8,21 @@ import java.util.List; import java.util.MissingResourceException; import java.util.Objects; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Transient; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java index c5bd48ae785..49c50e82efb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java @@ -5,13 +5,13 @@ */ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java index f34637dbfaf..bb6daa264ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java @@ -1,11 +1,11 @@ package edu.harvard.iq.dataverse; import java.util.List; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/Metric.java b/src/main/java/edu/harvard/iq/dataverse/Metric.java index 5526604f77c..0e71ab44db4 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/Metric.java +++ b/src/main/java/edu/harvard/iq/dataverse/Metric.java @@ -5,21 +5,20 @@ */ package edu.harvard.iq.dataverse; -import java.io.IOException; import java.io.Serializable; import java.sql.Timestamp; import java.util.Date; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java index 37a11396f37..d2c522b5c89 100644 --- a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java @@ -14,12 +14,12 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java index fac2abeddb8..0c5218fb927 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java @@ -6,8 +6,8 @@ package edu.harvard.iq.dataverse; -import javax.faces.view.ViewScoped; -import javax.inject.Named; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 8c0a0bf90b0..a1de33a764e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; @@ -17,14 +16,14 @@ import java.util.Map; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Inject; +import jakarta.inject.Named; 
import java.util.HashSet; import java.util.List; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import static edu.harvard.iq.dataverse.engine.command.CommandHelper.CH; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; @@ -41,7 +40,7 @@ import java.util.logging.Level; import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; -import javax.persistence.Query; +import jakarta.persistence.Query; /** * Your one-stop-shop for deciding which user can do what action on which diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java index 4ee45fc85a1..5ce9edb3a9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java @@ -14,10 +14,10 @@ import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java index a5e4cebbd95..0d863f47324 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java @@ -7,13 +7,13 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.authorization.RoleAssignee; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java index c6f2b7f28a5..059d5a8ffd3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java @@ -21,12 +21,12 @@ import java.util.TreeMap; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java index f053a449da4..df004fe1357 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java @@ -3,19 
+3,19 @@ import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import java.util.Objects; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; -import javax.persistence.UniqueConstraint; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; +import jakarta.persistence.UniqueConstraint; /** * A role of a user in a Dataverse. A User may have many roles in a given Dataverse. diff --git a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java index dd3044d3749..1bd337452c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java @@ -26,16 +26,18 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.faces.event.AbortProcessingException; +import jakarta.faces.event.AjaxBehaviorEvent; import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; @@ -92,7 +94,7 @@ public void setInheritAssignments(boolean inheritAssignments) { this.inheritAssignments = inheritAssignments; } - public void updatePermissionRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException { + public void updatePermissionRoot(AjaxBehaviorEvent event) throws AbortProcessingException { try { dvObject = commandEngine.submit( new UpdatePermissionRootCommand(!inheritAssignments, diff --git a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java index a099f5f3939..71318a0184a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java @@ -17,9 +17,6 @@ import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.api.AbstractApiBean; -import edu.harvard.iq.dataverse.batch.jobs.importer.filesystem.FileRecordWriter; -import 
edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; import java.io.BufferedReader; @@ -31,9 +28,9 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; /** * This class is for importing files added to s3 outside of dataverse. diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java index 13a7cc51357..6be768321c4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java +++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java @@ -11,16 +11,16 @@ import java.util.Optional; import java.util.Random; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.validator.ValidatorException; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.mail.internet.InternetAddress; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.validator.ValidatorException; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.mail.internet.InternetAddress; import org.apache.commons.validator.routines.EmailValidator; @ViewScoped diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 14b429f3219..307301049f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -27,17 +27,16 @@ import java.util.logging.Logger; import java.util.Set; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.validator.ValidatorException; -import javax.faces.view.ViewScoped; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonObject; -import javax.mail.internet.InternetAddress; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.validator.ValidatorException; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Named; +import jakarta.json.JsonObject; +import jakarta.mail.internet.InternetAddress; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 0f0e20aba94..bee1182e248 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -25,15 +25,15 @@ import java.util.Date; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import 
javax.faces.application.FacesMessage; -import javax.faces.context.ExternalContext; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.ExternalContext; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; @ViewScoped @Named("Shib") diff --git a/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java b/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java index cd6d53fd8a8..adf2e7d3010 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java @@ -6,11 +6,11 @@ import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; -import javax.ejb.EJB; -import javax.enterprise.context.SessionScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.json.JsonObjectBuilder; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.SessionScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.json.JsonObjectBuilder; @SessionScoped @Named("SuperUserPage") diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java index cc1190d36d9..05c6df51197 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Template.java +++ b/src/main/java/edu/harvard/iq/dataverse/Template.java @@ -12,31 +12,31 @@ import java.util.TreeMap; import java.util.stream.Collectors; -import javax.json.Json; -import javax.json.JsonObjectBuilder; -import javax.json.JsonString; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import javax.persistence.Transient; -import javax.validation.constraints.Size; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonString; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.Size; import edu.harvard.iq.dataverse.util.DateUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; import org.hibernate.validator.constraints.NotBlank; /** 
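One pattern worth calling out at this point: the ManageGuestbooksPage, ManageTemplatesPage, and RolePermissionFragment hunks above all make the same cleanup beyond the namespace flip, replacing fully qualified javax.faces.event types in listener signatures with short names backed by jakarta.faces.event imports. A minimal sketch of the resulting style, with hypothetical class and method names (ExampleFragment, updateExampleRoot):

import jakarta.faces.event.AbortProcessingException;
import jakarta.faces.event.AjaxBehaviorEvent;

public class ExampleFragment {
    private boolean inheritAssignments;

    // Before: public void updateExampleRoot(javax.faces.event.AjaxBehaviorEvent event)
    //         throws javax.faces.event.AbortProcessingException
    public void updateExampleRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
        // Toggle the flag that the AJAX checkbox event controls; the Faces
        // runtime dispatches the event exactly as before, only the
        // declaration changed.
        inheritAssignments = !inheritAssignments;
    }
}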
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java b/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java index 98b24f84801..1d855e029ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java @@ -6,12 +6,12 @@ package edu.harvard.iq.dataverse; -import javax.ejb.EJB; -import javax.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.ejb.EJB; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java index 56898943467..fff520fd259 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java @@ -15,12 +15,12 @@ import java.util.Date; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.faces.application.FacesMessage; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java index f2ac8779d2d..46382fc2588 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java @@ -2,14 +2,13 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import java.util.List; -import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index a8616283332..ee865770dbe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -6,15 +6,15 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToOne; -import javax.persistence.Transient; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import 
jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Transient; import edu.harvard.iq.dataverse.license.License; diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java index 2cf78db0f03..ca38a305d63 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java @@ -6,8 +6,8 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java index e270d3842f6..9a62a99722a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java @@ -18,16 +18,16 @@ import java.nio.file.StandardCopyOption; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.PreDestroy; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.html.HtmlInputText; -import javax.faces.context.FacesContext; -import javax.faces.validator.ValidatorException; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.annotation.PreDestroy; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.html.HtmlInputText; +import jakarta.faces.context.FacesContext; +import jakarta.faces.validator.ValidatorException; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; import org.primefaces.PrimeFaces; diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 6c8db8c124b..319ae06eefb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -8,24 +8,21 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; -import edu.harvard.iq.dataverse.dataset.DatasetUtil; + import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.util.FileUtil; -import java.io.File; + import java.io.IOException; import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.Base64; import java.util.HashMap; import java.util.Map; -import javax.ejb.EJB; -import javax.enterprise.context.RequestScoped; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.RequestScoped; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.io.IOUtils; /** diff --git 
a/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java index 7bd4f2d898f..888669ee615 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java @@ -4,15 +4,15 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import java.io.Serializable; import java.util.Date; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.OneToOne; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index b68a1b9d13e..a87404b69a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -12,17 +12,17 @@ import java.util.Collections; import java.util.HashSet; import java.util.stream.Collectors; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import javax.persistence.Transient; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java index 947ee3ce989..a2a71ff8b40 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java @@ -14,16 +14,16 @@ import java.sql.Timestamp; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; 
/** * diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java index 2d8ecf64f76..93892376edc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java @@ -12,16 +12,15 @@ import java.util.Date; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; import org.apache.commons.lang3.StringUtils; -import org.ocpsoft.common.util.Strings; @Stateless @Named @@ -190,7 +189,7 @@ private HashMap<String, List<String>> retrieveRolesForUsers(List<Object[]> userO // Add '@' to each identifier and delimit the list by "," // ------------------------------------------------- String identifierListString = userIdentifierList.stream() - .filter(x -> !Strings.isNullOrEmpty(x)) + .filter(x -> x != null && !x.isEmpty()) .map(x -> "'@" + x + "'") .collect(Collectors.joining(", ")); diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java b/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java index 94e33d6220a..9e8ce42491d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java +++ b/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java @@ -10,8 +10,8 @@ import java.lang.annotation.Retention; import static java.lang.annotation.RetentionPolicy.RUNTIME; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java index ae7b4a1eaef..f36a1d9541e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java +++ b/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java @@ -13,8 +13,8 @@ import java.lang.annotation.Retention; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; @Target({TYPE, ANNOTATION_TYPE}) @Retention(RUNTIME) diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java index 8717d10fc8d..f55e93af674 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java @@ -10,8 +10,8 @@ import java.lang.annotation.Retention; import static java.lang.annotation.RetentionPolicy.RUNTIME; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
b/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java index 405a7feb52f..c8d64d4a642 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java +++ b/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java @@ -11,8 +11,8 @@ import java.lang.annotation.Retention; import static java.lang.annotation.RetentionPolicy.RUNTIME; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java index 743d8f2d092..a8ea5fabde4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java @@ -5,10 +5,10 @@ */ package edu.harvard.iq.dataverse; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Named; -import javax.servlet.http.HttpServletResponse; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletResponse; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java index 31a9ad25e5b..6743c3f2143 100644 --- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java +++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java @@ -3,16 +3,16 @@ import java.util.Date; import java.util.Objects; import java.util.UUID; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.PrePersist; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.PrePersist; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * Logs a single action in the action log. diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java index ba19fdd9eeb..2d16f52bb09 100644 --- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java @@ -1,11 +1,11 @@ package edu.harvard.iq.dataverse.actionlogging; import java.util.Date; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * A service bean that persists {@link ActionLogRecord}s to the DB. 
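Editor's note on the pattern in this stretch of the patch: every Jakarta EE API moves mechanically from the javax.* prefix to jakarta.* (jakarta.persistence, jakarta.ejb, jakarta.inject, jakarta.json, jakarta.validation, jakarta.servlet, jakarta.ws.rs, jakarta.faces, jakarta.mail), while packages shipped with the JDK itself keep their names, which is why imports such as javax.xml.stream.* in the EditDDI and Dataverses hunks further down stay as they are. The one change of substance so far is in UserServiceBean, which drops the third-party org.ocpsoft Strings.isNullOrEmpty(x) helper in favor of the equivalent plain check x != null && !x.isEmpty(). As a minimal sketch of the rename (ExampleRecord is hypothetical, not a class in this patch), an entity is source-identical before and after except for its import lines:

// Hypothetical entity, shown only to illustrate the jakarta.persistence rename.
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;

@Entity
public class ExampleRecord {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id; // the only difference from the javax.persistence version is the imports

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }
}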
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 6b3e27becb7..5a4c9ab9058 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -57,24 +57,24 @@ import java.util.concurrent.Callable; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonReader; -import javax.json.JsonValue; -import javax.json.JsonValue.ValueType; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.*; -import javax.ws.rs.core.Response.ResponseBuilder; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonReader; +import jakarta.json.JsonValue; +import jakarta.json.JsonValue.ValueType; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.*; +import jakarta.ws.rs.core.Response.ResponseBuilder; +import jakarta.ws.rs.core.Response.Status; import static org.apache.commons.lang3.StringUtils.isNumeric; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 02441a9ee11..0341f8c1127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -31,7 +31,7 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.UserNotificationServiceBean; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; + import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; import edu.harvard.iq.dataverse.api.auth.AuthRequired; @@ -73,7 +73,7 @@ import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.util.logging.Logger; -import javax.ejb.EJB; +import jakarta.ejb.EJB; import java.io.InputStream; import java.io.ByteArrayOutputStream; import java.io.File; @@ -88,47 +88,46 @@ import java.util.List; import java.util.Properties; import java.util.logging.Level; -import javax.inject.Inject; -import javax.json.Json; +import jakarta.inject.Inject; +import jakarta.json.Json; import java.net.URI; -import javax.json.JsonArrayBuilder; -import javax.persistence.TypedQuery; - -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; - -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.UriInfo; - - -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import 
javax.ws.rs.ForbiddenException; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.QueryParam; -import javax.ws.rs.ServiceUnavailableException; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import javax.ws.rs.core.StreamingOutput; +import jakarta.json.JsonArrayBuilder; +import jakarta.persistence.TypedQuery; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; + +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.UriInfo; + + +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.ForbiddenException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.ServiceUnavailableException; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.Response; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import jakarta.ws.rs.core.StreamingOutput; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.net.URISyntaxException; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.RedirectionException; -import javax.ws.rs.ServerErrorException; -import javax.ws.rs.core.MediaType; -import static javax.ws.rs.core.Response.Status.FORBIDDEN; -import static javax.ws.rs.core.Response.Status.UNAUTHORIZED; + +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.RedirectionException; +import jakarta.ws.rs.ServerErrorException; +import jakarta.ws.rs.core.MediaType; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; +import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataParam; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 2e98388b4b6..fd3b9a89e54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -18,7 +18,6 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.validation.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.HandlenetServiceBean; import edu.harvard.iq.dataverse.Template; import edu.harvard.iq.dataverse.TemplateServiceBean; @@ -48,19 +47,19 @@ import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.json.Json; +import 
jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.InputStream; @@ -69,14 +68,14 @@ import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; @@ -117,12 +116,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import javax.inject.Inject; -import javax.json.JsonArray; -import javax.persistence.Query; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.StreamingOutput; +import jakarta.inject.Inject; +import jakarta.json.JsonArray; +import jakarta.persistence.Query; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.StreamingOutput; /** * Where the secure, setup API calls live. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java index 6bf852d25f7..0e5b8226310 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java @@ -10,21 +10,22 @@ import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.ejb.EJB; +import jakarta.servlet.Filter; +import jakarta.servlet.FilterChain; +import jakarta.servlet.FilterConfig; +import jakarta.servlet.ServletException; +import jakarta.servlet.ServletRequest; +import jakarta.servlet.ServletResponse; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; /** * A web filter to block API administration calls. 
* @author michael */ -public class ApiBlockingFilter implements javax.servlet.Filter { +public class ApiBlockingFilter implements Filter { public static final String UNBLOCK_KEY_QUERYPARAM = "unblock-key"; interface BlockPolicy { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java index f9ba088a4e9..d076ab8f973 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.api; -import javax.ws.rs.ApplicationPath; +import jakarta.ws.rs.ApplicationPath; import edu.harvard.iq.dataverse.api.auth.AuthFilter; import org.glassfish.jersey.media.multipart.MultiPartFeature; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java index 691afeaef20..193e1059415 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java @@ -1,21 +1,22 @@ package edu.harvard.iq.dataverse.api; +import jakarta.servlet.Filter; import java.io.IOException; import java.util.logging.Logger; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.RequestDispatcher; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; +import jakarta.servlet.FilterChain; +import jakarta.servlet.FilterConfig; +import jakarta.servlet.RequestDispatcher; +import jakarta.servlet.ServletException; +import jakarta.servlet.ServletRequest; +import jakarta.servlet.ServletResponse; +import jakarta.servlet.http.HttpServletRequest; /** * Routes API calls that don't have a version number to the latest API version * * @author michael */ -public class ApiRouter implements javax.servlet.Filter { +public class ApiRouter implements Filter { private static final Logger logger = Logger.getLogger(ApiRouter.class.getName()); @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index bf9ce2adc5a..a2d06bff93e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -15,16 +15,16 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.io.IOException; import java.io.PrintWriter; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; @Stateless @Path("batch") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java index 8fe58298481..daddc447117 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java @@ -14,12 +14,12 @@ import java.util.Date; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; /** * EJB for kicking off big batch jobs asynchronously from the REST API (BatchImport.java) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java index 9262cc6ef46..50862bc0d35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java @@ -13,25 +13,22 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.sql.Timestamp; -import java.util.Calendar; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.json.Json; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; import java.util.Date; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; /** * REST API bean for managing {@link BuiltinUser}s. 
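Editor's note: the ApiBlockingFilter and ApiRouter hunks above do slightly more than swap prefixes. Each class used to implement the fully qualified javax.servlet.Filter; both now import jakarta.servlet.Filter and implement it by its simple name. A minimal sketch of the resulting shape (ExampleFilter is invented for illustration; since Servlet 4.0 the init and destroy methods have default implementations, so only doFilter must be overridden):

// Hypothetical filter showing the post-migration shape of ApiBlockingFilter/ApiRouter.
import java.io.IOException;
import jakarta.servlet.Filter;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;

public class ExampleFilter implements Filter {
    @Override
    public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
            throws IOException, ServletException {
        // a real filter would inspect or block the request here
        chain.doFilter(req, res);
    }
}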
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java index 7edb0ac838c..35f19375902 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java @@ -12,14 +12,14 @@ import java.io.OutputStream; import java.io.IOException; -import javax.ws.rs.InternalServerErrorException; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.WebApplicationException; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.ext.MessageBodyWriter; -import javax.ws.rs.ext.Provider; +import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.WebApplicationException; + +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MultivaluedMap; +import jakarta.ws.rs.ext.MessageBodyWriter; +import jakarta.ws.rs.ext.Provider; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.dataaccess.*; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java b/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java index 063033d4747..d7c8bd827d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java @@ -5,18 +5,18 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.JsonObject; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.JsonObject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import jakarta.ws.rs.client.WebTarget; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 4ec728a8159..00b7dfa6e36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -20,18 +20,18 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; -import javax.ws.rs.Consumes; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; 
+import jakarta.ws.rs.core.Response; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; @@ -41,9 +41,9 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.persistence.NoResultException; -import javax.persistence.TypedQuery; -import javax.ws.rs.core.Response.Status; +import jakarta.persistence.NoResultException; +import jakarta.persistence.TypedQuery; +import jakarta.ws.rs.core.Response.Status; import java.io.BufferedInputStream; import java.io.FileOutputStream; @@ -207,7 +207,7 @@ public Response showControlledVocabularyForSubject() { @GET @Path("loadNAControlledVocabularyValue") public Response loadNAControlledVocabularyValue() { - // the find will throw a javax.persistence.NoResultException if no values are in db + // the find will throw a NoResultException if no values are in db // datasetFieldService.findNAControlledVocabularyValue(); TypedQuery naValueFinder = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.datasetFieldType is null AND o.strValue = :strvalue", ControlledVocabularyValue.class); naValueFinder.setParameter("strvalue", DatasetField.NA_VALUE); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index dcd7eacf50b..dbea63cb1c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -63,9 +63,7 @@ import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; -import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; -import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; @@ -85,7 +83,6 @@ import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key; import edu.harvard.iq.dataverse.util.ArchiverUtil; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; @@ -96,7 +93,6 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.search.IndexServiceBean; @@ -131,32 +127,31 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.inject.Inject; -import javax.json.*; -import javax.json.stream.JsonParsingException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.NotAcceptableException; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import 
javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.*; -import javax.ws.rs.core.Response.Status; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.inject.Inject; +import jakarta.json.*; +import jakarta.json.stream.JsonParsingException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.NotAcceptableException; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.*; +import jakarta.ws.rs.core.Response.Status; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import org.apache.commons.lang3.StringUtils; -import org.apache.solr.client.solrj.SolrServerException; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 5214e3fbdcb..a60775cbd38 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -2,8 +2,6 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetAuthor; -import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; @@ -22,9 +20,7 @@ import edu.harvard.iq.dataverse.GuestbookServiceBean; import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.RoleAssignment; -import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.api.dto.RoleDTO; @@ -36,7 +32,6 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataverse.DataverseUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -91,33 +86,33 @@ import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonNumber; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonString; -import javax.json.JsonValue; -import javax.json.JsonValue.ValueType; -import javax.json.stream.JsonParsingException; -import 
javax.validation.ConstraintViolationException; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonNumber; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.JsonValue.ValueType; +import jakarta.json.stream.JsonParsingException; +import jakarta.validation.ConstraintViolationException; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.io.IOException; @@ -129,11 +124,11 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import javax.servlet.http.HttpServletResponse; -import javax.validation.constraints.NotNull; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.StreamingOutput; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.StreamingOutput; import javax.xml.stream.XMLStreamException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java index c9eb3638b90..e9f869ad8b9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java @@ -14,9 +14,9 @@ import java.util.logging.Logger; import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService; -import javax.faces.context.FacesContext; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.UriInfo; +import jakarta.faces.context.FacesContext; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.UriInfo; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index 2410da04072..af681234e82 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -12,14 +12,14 @@ import java.io.OutputStream; import java.io.IOException; -import javax.ws.rs.WebApplicationException; +import jakarta.ws.rs.WebApplicationException; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; 
+import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MultivaluedMap; +import jakarta.ws.rs.core.Response; -import javax.ws.rs.ext.MessageBodyWriter; -import javax.ws.rs.ext.Provider; +import jakarta.ws.rs.ext.MessageBodyWriter; +import jakarta.ws.rs.ext.Provider; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.dataaccess.*; @@ -43,12 +43,12 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.inject.Inject; -import javax.ws.rs.ClientErrorException; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.RedirectionException; -import javax.ws.rs.ServiceUnavailableException; -import javax.ws.rs.core.HttpHeaders; +import jakarta.inject.Inject; +import jakarta.ws.rs.ClientErrorException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.RedirectionException; +import jakarta.ws.rs.ServiceUnavailableException; +import jakarta.ws.rs.core.HttpHeaders; import org.apache.tika.mime.MimeType; import org.apache.tika.mime.MimeTypeException; import org.apache.tika.mime.MimeTypes; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java index 3960fe4e996..1b74ab5479e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java @@ -2,9 +2,7 @@ import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; @@ -28,28 +26,25 @@ import edu.harvard.iq.dataverse.datavariable.VariableCategory; import edu.harvard.iq.dataverse.datavariable.VariableMetadataDDIParser; import edu.harvard.iq.dataverse.search.IndexServiceBean; -import org.apache.solr.client.solrj.SolrServerException; - -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Context; -import javax.ws.rs.Path; -import javax.ws.rs.PUT; -import javax.ws.rs.Consumes; -import javax.ws.rs.PathParam; + +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.PathParam; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamReader; -import java.io.IOException; import java.io.InputStream; -import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; import java.util.List; @@ -57,11 +52,9 @@ import java.util.Map; import java.util.HashMap; import java.util.Collection; -import java.util.Date; -import java.sql.Timestamp; -import 
javax.validation.ConstraintViolationException; +import jakarta.validation.ConstraintViolationException; @Stateless @Path("edit") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java index e53b54482b8..1feac1141bb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java @@ -4,15 +4,15 @@ import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Response; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.Response; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; @Path("admin/externalTools") public class ExternalTools extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java index 53829cf09cc..8a178f8da62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java @@ -11,17 +11,17 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.MailUtil; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonNumber; -import javax.json.JsonObject; -import javax.mail.internet.AddressException; -import javax.mail.internet.InternetAddress; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonNumber; +import jakarta.json.JsonObject; +import jakarta.mail.internet.AddressException; +import jakarta.mail.internet.InternetAddress; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; @Path("admin/feedback") public class FeedbackApi extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index f6eda085c95..3324523afbc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -55,26 +55,26 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.inject.Inject; -import javax.json.Json; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import 
static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import javax.ws.rs.core.UriInfo; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.inject.Inject; +import jakarta.json.Json; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import jakarta.ws.rs.core.UriInfo; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -448,7 +448,7 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa return error(Response.Status.BAD_REQUEST, "An error has occurred attempting to update the requested DataFile. It is not part of the current version of the Dataset."); } - javax.json.JsonObject jsonObject = JsonUtil.getJsonObject(jsonData); + jakarta.json.JsonObject jsonObject = JsonUtil.getJsonObject(jsonData); String incomingLabel = jsonObject.getString("label", null); String incomingDirectoryLabel = jsonObject.getString("directoryLabel", null); String existingLabel = df.getFileMetadata().getLabel(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java index 5a587efadf3..d56a787c7ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java @@ -9,26 +9,26 @@ import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; -import javax.ejb.Stateless; -import javax.interceptor.Interceptors; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.core.Response; +import jakarta.ejb.Stateless; +import jakarta.interceptor.Interceptors; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Response; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; -import javax.annotation.PostConstruct; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonString; -import javax.ws.rs.DELETE; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.PathParam; +import jakarta.annotation.PostConstruct; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonString; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.PathParam; import static org.apache.commons.lang3.StringUtils.isNumeric; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java index e739b1520a0..d7eec9f5757 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.api; import 
edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; @@ -17,27 +16,27 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; -import javax.json.JsonObjectBuilder; +import jakarta.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; @Path("harvest/clients") public class HarvestingClients extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java index f5e3e669083..308b910c425 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java @@ -11,29 +11,29 @@ import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import javax.json.JsonObjectBuilder; +import jakarta.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.IOException; import java.io.StringReader; import java.util.List; import java.util.logging.Logger; import java.util.regex.Pattern; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.Json; -import javax.json.JsonReader; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.Json; +import jakarta.json.JsonReader; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; import 
org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index 1361de8fbf7..4910c460b6a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -47,26 +47,24 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; import org.apache.solr.client.solrj.SolrServerException; @Path("admin/index") @@ -199,7 +197,7 @@ private Response indexAllOrSubset(Long numPartitionsSelected, Long partitionIdTo } } } - if (sb.toString().equals("javax.ejb.EJBException: Transaction aborted javax.transaction.RollbackException java.lang.IllegalStateException ")) { + if (sb.toString().contains("java.lang.IllegalStateException ")) { return ok("indexing went as well as can be expected... 
got java.lang.IllegalStateException but some indexing may have happened anyway"); } else { return error(Status.INTERNAL_SERVER_ERROR, sb.toString()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index cfee582f69d..3349c34dfcc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -4,14 +4,14 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonValue; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; @Path("info") public class Info extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java index 3b725468161..05d12f1083c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java @@ -28,20 +28,20 @@ import java.sql.Timestamp; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonValue; -import javax.json.JsonWriter; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.ServiceUnavailableException; -import javax.ws.rs.Consumes; -import javax.ws.rs.ForbiddenException; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonValue; +import jakarta.json.JsonWriter; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.ServiceUnavailableException; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.ForbiddenException; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; @Path("inbox") public class LDNInbox extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java index a9d7eb8024c..ab50ebbf2e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java @@ -2,20 +2,20 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; 
+import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.Stateless; +import jakarta.ws.rs.core.Response.Status; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.users.User; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Logout.java b/src/main/java/edu/harvard/iq/dataverse/api/Logout.java index d6d8d5cdc44..e8d8be04459 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Logout.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Logout.java @@ -4,10 +4,10 @@ import edu.harvard.iq.dataverse.DataverseSession; import edu.harvard.iq.dataverse.settings.FeatureFlags; -import javax.inject.Inject; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.core.Response; +import jakarta.inject.Inject; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Response; @Path("logout") public class Logout extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Mail.java b/src/main/java/edu/harvard/iq/dataverse/api/Mail.java index 3b5050b480b..5fac2f30c89 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Mail.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Mail.java @@ -2,10 +2,10 @@ import edu.harvard.iq.dataverse.MailServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; -import javax.ejb.EJB; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Response; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 0193c7774f1..6b48dbf8415 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -18,19 +18,19 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; /** * Note that there are makeDataCount endpoints in Datasets.java as well. 
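Editor's note: the API resources in this part of the patch (MakeDataCountApi above, Meta and Metrics below) build their responses with JSON-P, and the migration leaves that API surface untouched; only the import prefix moves from javax.json to jakarta.json. A hedged sketch of the builder idiom under the new namespace (class name and values are invented, not code from this patch):

// Hypothetical example of the jakarta.json builder API.
import jakarta.json.Json;
import jakarta.json.JsonObject;

public class JsonExample {
    public static void main(String[] args) {
        JsonObject obj = Json.createObjectBuilder()
                .add("status", "OK") // same fluent calls as under javax.json
                .add("data", Json.createObjectBuilder()
                        .add("message", "hello"))
                .build();
        System.out.println(obj); // prints {"status":"OK","data":{"message":"hello"}}
    }
}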
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java index 1ca97f2ec69..a38840ba50d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java @@ -15,19 +15,19 @@ import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import java.util.logging.Logger; -import javax.ejb.EJB; +import jakarta.ejb.EJB; import java.io.ByteArrayOutputStream; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.ServiceUnavailableException; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.ServiceUnavailableException; /* Custom API exceptions [NOT YET IMPLEMENTED] diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java index b0d82b69d1b..bd937878286 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java @@ -8,22 +8,17 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; -import java.io.IOException; -import java.util.concurrent.Future; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.*; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.*; -import javax.ws.rs.core.Response; +import jakarta.ws.rs.core.Response; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean; import edu.harvard.iq.dataverse.harvest.server.OAISet; -import org.apache.solr.client.solrj.SolrServerException; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java index b3e1dad13af..448fb48e389 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java @@ -1,12 +1,12 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.MetadataBlock; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Response; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Response; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import javax.ws.rs.PathParam; +import jakarta.ws.rs.PathParam; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java index e966fd200d5..7bb2570334b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java 
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java @@ -12,22 +12,22 @@ import java.util.List; import java.util.logging.Logger; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.ws.rs.GET; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Request; -import javax.ws.rs.core.Response; - -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import javax.ws.rs.core.UriInfo; -import javax.ws.rs.core.Variant; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Request; +import jakarta.ws.rs.core.Response; + +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import jakarta.ws.rs.core.UriInfo; +import jakarta.ws.rs.core.Variant; /** * API endpoints for various metrics. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java index 006a95d85a5..37c894d3071 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java @@ -11,19 +11,19 @@ import java.util.Optional; import java.util.Set; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; import edu.harvard.iq.dataverse.util.MailUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Pids.java b/src/main/java/edu/harvard/iq/dataverse/api/Pids.java index 2e6e97f7ced..534e42fd505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Pids.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Pids.java @@ -10,24 +10,24 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; -import javax.ejb.Stateless; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.InternalServerErrorException; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import 
javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ejb.Stateless; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; /** * PIDs are Persistent IDentifiers such as DOIs or Handles. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java index f0f18f781f1..37b4792920f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java @@ -15,23 +15,23 @@ import java.io.StringReader; import java.util.HashMap; import java.util.logging.Logger; -import javax.inject.Inject; -import javax.json.Json; -import javax.json.JsonException; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import jakarta.inject.Inject; +import jakarta.json.Json; +import jakarta.json.JsonException; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; @Path("files") public class Prov extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java index b7f9e4821e5..8812f95dea1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java @@ -1,29 +1,26 @@ package edu.harvard.iq.dataverse.api; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; - import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import 
edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; import java.util.List; -import javax.ejb.Stateless; -import javax.ws.rs.DELETE; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.Stateless; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; /** * Util API for managing roles. Might not make it to the production version. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java index 7ead0d23711..5d0365d022e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java @@ -11,23 +11,23 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJBException; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Response; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; -import static javax.ws.rs.core.Response.Status.NOT_FOUND; +import jakarta.ejb.EJBException; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Response; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; +import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; @Path("admin/savedsearches") public class SavedSearches extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index d3c9d0a4cc6..c760534ca7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -24,19 +24,18 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.GET; +import 
jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java b/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java index 787c3380e5b..37d6a2aa3fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java @@ -2,13 +2,13 @@ import edu.harvard.iq.dataverse.sitemap.SiteMapServiceBean; import edu.harvard.iq.dataverse.sitemap.SiteMapUtil; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; @Stateless @Path("admin/sitemap") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java b/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java index 54adeecd9f9..2915328428e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java @@ -3,16 +3,16 @@ import edu.harvard.iq.dataverse.locality.StorageSite; import edu.harvard.iq.dataverse.locality.StorageSiteUtil; import java.util.List; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Response; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.Response; @Path("admin/storageSites") public class StorageSites extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java index 42caa95b9f5..87be1f14e05 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java @@ -2,20 +2,19 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import java.util.List; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Response; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Response; +import static 
jakarta.ws.rs.core.Response.Status.BAD_REQUEST; @Path("admin/test") public class TestApi extends AbstractApiBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java index 15c3b34f6af..05ba150df8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java @@ -18,22 +18,22 @@ import edu.harvard.iq.dataverse.util.StringUtil; import java.io.BufferedInputStream; import java.util.logging.Logger; -import javax.ejb.EJB; +import jakarta.ejb.EJB; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.UriInfo; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.QueryParam; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.UriInfo; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.QueryParam; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index 14bf25c91b2..791fc7aa774 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -5,8 +5,6 @@ */ package edu.harvard.iq.dataverse.api; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; - import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -15,32 +13,30 @@ import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand; import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; -import edu.harvard.iq.dataverse.metrics.MetricsUtil; import edu.harvard.iq.dataverse.util.FileUtil; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.json.JsonArray; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Request; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Variant; +import jakarta.ejb.Stateless; +import jakarta.json.JsonArray; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import 
jakarta.ws.rs.core.Request; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Variant; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Util.java b/src/main/java/edu/harvard/iq/dataverse/api/Util.java index 82adedc709f..25855769a38 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Util.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Util.java @@ -7,9 +7,9 @@ import java.util.TimeZone; import java.util.TreeSet; import java.util.stream.Collectors; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonReader; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonReader; public class Util { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java index 4269a0215bf..4eadcedf71a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java @@ -8,11 +8,11 @@ import java.util.Arrays; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.Response; /** * API Endpoint for external systems to report the results of workflow step diff --git a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java index 4babe6875e2..8d5024c1c14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java @@ -11,18 +11,18 @@ import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; import java.util.Arrays; import java.util.Optional; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.Response; /** * API Endpoint for managing workflows. 
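The hunks above are the mechanical heart of this change set: JAX-RS, JSON-P, EJB, and servlet imports move one-for-one from the javax.* namespace to the jakarta.* namespace required by Jakarta EE 10, and a handful of unused imports are dropped along the way. A minimal sketch of the pattern, using a hypothetical resource class rather than one from this change set:

import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;

// Hypothetical resource for illustration only. Before the migration the
// five imports above would have read javax.ws.rs.* / javax.ws.rs.core.*.
@Path("example")
public class ExampleResource {

    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response ping() {
        // Same JAX-RS builder API as before; only the package prefix moved.
        return Response.ok("{\"status\":\"OK\"}").build();
    }
}

Nothing but the package prefix changes; annotations, signatures, and the Response builder API are identical, which is why these hunks are import-only.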
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java index 60b75757f3c..0dd8a28baca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java @@ -7,8 +7,8 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; -import javax.inject.Inject; -import javax.ws.rs.container.ContainerRequestContext; +import jakarta.inject.Inject; +import jakarta.ws.rs.container.ContainerRequestContext; import java.util.logging.Logger; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java index dea9c571d22..34a72d718f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java @@ -3,12 +3,12 @@ import edu.harvard.iq.dataverse.api.ApiConstants; import edu.harvard.iq.dataverse.authorization.users.User; -import javax.annotation.Priority; -import javax.inject.Inject; -import javax.ws.rs.Priorities; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.container.ContainerRequestFilter; -import javax.ws.rs.ext.Provider; +import jakarta.annotation.Priority; +import jakarta.inject.Inject; +import jakarta.ws.rs.Priorities; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerRequestFilter; +import jakarta.ws.rs.ext.Provider; import java.io.IOException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java index e9bf1f39361..bd34acbf702 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java @@ -2,7 +2,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; -import javax.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerRequestContext; /** * @author Guillermo Portas diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java index 4deacc7f66e..bf0d785eeb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.api.auth; -import javax.ws.rs.NameBinding; +import jakarta.ws.rs.NameBinding; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.Target; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index c4b03728179..b5a48427fa5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -11,9 +11,9 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.FeatureFlags; -import javax.inject.Inject; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.HttpHeaders; +import jakarta.inject.Inject; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.HttpHeaders; 
import java.io.IOException; import java.util.List; import java.util.Optional; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java index 59384ff0336..801e2752b9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java @@ -3,8 +3,8 @@ import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; -import javax.inject.Inject; -import javax.ws.rs.container.ContainerRequestContext; +import jakarta.inject.Inject; +import jakarta.ws.rs.container.ContainerRequestContext; import java.util.ArrayList; import java.util.Arrays; import java.util.List; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java index 53aec9235a4..c1471c3f5b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java @@ -4,8 +4,8 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.FeatureFlags; -import javax.inject.Inject; -import javax.ws.rs.container.ContainerRequestContext; +import jakarta.inject.Inject; +import jakarta.ws.rs.container.ContainerRequestContext; public class SessionCookieAuthMechanism implements AuthMechanism { @Inject diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java index caa58dfddf3..f8572144236 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java @@ -7,9 +7,9 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; -import javax.inject.Inject; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.UriInfo; +import jakarta.inject.Inject; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.UriInfo; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java index e673e14e677..bbd67713e85 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java @@ -4,8 +4,8 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import javax.inject.Inject; -import javax.ws.rs.container.ContainerRequestContext; +import jakarta.inject.Inject; +import jakarta.ws.rs.container.ContainerRequestContext; /** * @author Guillermo Portas diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java index 18eafde6ede..40431557261 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java @@ -3,8 +3,8 @@ import 
edu.harvard.iq.dataverse.api.ApiConstants; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; public class WrappedAuthErrorResponse extends Exception { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java index 37c29f20efe..09a60b1b700 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java @@ -4,17 +4,17 @@ import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.JobInstance; -import javax.ejb.Stateless; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.batch.operations.JobOperator; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.batch.runtime.JobExecution; +import jakarta.batch.runtime.JobInstance; +import jakarta.ejb.Stateless; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import java.util.ArrayList; import java.util.List; import java.util.Set; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java index 8695c7dcab7..b7a6b7cfafd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java @@ -6,21 +6,21 @@ import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode; import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObject; +import jakarta.json.Json; +import jakarta.json.JsonObject; @Stateless @Path("batch/jobs") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java index 6543d771ebe..5bc50903be8 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java @@ -18,12 +18,12 @@ import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.inject.Inject; -import javax.servlet.http.HttpServletRequest; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; import org.apache.abdera.parser.ParseException; import org.swordapp.server.AuthCredentials; import org.swordapp.server.CollectionDepositManager; @@ -174,7 +174,9 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede // curl --insecure --data-binary "@multipart.dat" -H 'Content-Type: multipart/related; boundary="===============0670350989=="' -H "MIME-Version: 1.0" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/hdl:1902.1/12345 // but... // "Yeah, multipart is critically broken across all implementations" -- http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00327.html - throw new UnsupportedOperationException("Not yet implemented"); + // + // OB 2022-03-24 -> sword2-server v2.0 library drops support for multipart/related. + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Multipart/related RFC2387 type posts are not supported. Please POST an Atom entry instead."); } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "expected deposit types are isEntryOnly, isBinaryOnly, and isMultiPart"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java index c7ed00a23d0..084136f2b5d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java @@ -11,9 +11,9 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.inject.Inject; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; import javax.xml.namespace.QName; import org.apache.abdera.Abdera; import org.apache.abdera.i18n.iri.IRI; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java index b605f3717a8..4d4d1d08b51 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java @@ -25,12 +25,12 @@ import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import 
jakarta.ejb.EJBException; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.servlet.http.HttpServletRequest; import org.apache.abdera.parser.ParseException; import org.swordapp.server.AuthCredentials; import org.swordapp.server.ContainerManager; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 482e35df781..93b7dc96563 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -9,9 +9,6 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; -import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; @@ -29,12 +26,12 @@ import java.util.Map; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.inject.Inject; -import javax.servlet.http.HttpServletRequest; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; import org.swordapp.server.AuthCredentials; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java index a761afd1324..c509a8d6f52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java @@ -2,10 +2,10 @@ import java.io.IOException; import java.util.concurrent.locks.ReentrantLock; -import javax.inject.Inject; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.inject.Inject; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.swordapp.server.CollectionAPI; import org.swordapp.server.servlets.SwordServlet; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java index 441186cc63f..53dce24c0fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java @@ -2,10 +2,10 @@ import java.io.IOException; import java.util.concurrent.locks.ReentrantLock; -import javax.inject.Inject; -import javax.servlet.ServletException; -import 
javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.inject.Inject; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.swordapp.server.ContainerAPI; import org.swordapp.server.ContainerManager; import org.swordapp.server.StatementManager; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java index c455a6fd26a..245ab6ab23b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java @@ -2,10 +2,10 @@ import java.io.IOException; import java.util.concurrent.locks.ReentrantLock; -import javax.inject.Inject; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.inject.Inject; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.swordapp.server.MediaResourceAPI; import org.swordapp.server.servlets.SwordServlet; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java index 37db76d3c9c..eab005d87fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java @@ -1,10 +1,10 @@ package edu.harvard.iq.dataverse.api.datadeposit; import java.io.IOException; -import javax.inject.Inject; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.inject.Inject; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.swordapp.server.ServiceDocumentAPI; import org.swordapp.server.servlets.SwordServlet; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java index ed1202d8c77..4bcc9c6afe8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java @@ -1,10 +1,10 @@ package edu.harvard.iq.dataverse.api.datadeposit; import java.io.IOException; -import javax.inject.Inject; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.inject.Inject; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import org.swordapp.server.StatementAPI; import org.swordapp.server.StatementManager; import org.swordapp.server.servlets.SwordServlet; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java index 049b20f605b..134d54aef88 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
@@ -8,8 +8,8 @@ import edu.harvard.iq.dataverse.util.SystemConfig;
import java.util.List;
import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.swordapp.server.AuthCredentials;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
index 05864fb2da0..95763e0eafb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
@@ -17,9 +17,9 @@ import java.util.Optional;
import java.util.logging.Logger;
import static java.util.stream.Collectors.joining;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
import org.apache.abdera.i18n.iri.IRI;
import org.apache.abdera.i18n.iri.IRISyntaxException;
import org.apache.abdera.model.AtomDate;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
index 1e506c6a0b1..a5564e9fbdb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
@@ -6,7 +6,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
import org.swordapp.server.SwordConfiguration;
public class SwordConfigurationImpl implements SwordConfiguration {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java
new file mode 100644
index 00000000000..aa7e028a4ba
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java
@@ -0,0 +1,50 @@
+package edu.harvard.iq.dataverse.api.datadeposit;
+
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequestWrapper;
+import java.io.IOException;
+
+public class SwordFilter implements Filter {
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ HttpServletRequest req = (HttpServletRequest) request;
+ MutateHeaders requestWrapper = new MutateHeaders(req);
+ chain.doFilter(requestWrapper, response);
+ }
+
+ /**
+ * We are mutating headers because Payara 6 is more strict than Payara 5 and
+ wants "attachment; filename=" instead of just "filename=". In order to
+ not break backward compatibility, we add "attachment; " for our (SWORD)
+ API users. (This only seems to affect our SWORD API.) That is, they can
+ continue to send '-H "Content-Disposition: filename=example.zip"' as
+ we've documented for years.
+ */
+ public class MutateHeaders extends HttpServletRequestWrapper {
+
+ public MutateHeaders(HttpServletRequest request) {
+ super(request);
+ }
+
+ // inspired by https://stackoverflow.com/questions/2811769/adding-an-http-header-to-the-request-in-a-servlet-filter/2811841#2811841
+ @Override
+ public String getHeader(String name) {
+ String header = super.getHeader(name);
+ // guard against requests that do not send this header at all
+ if (header != null && "Content-Disposition".equalsIgnoreCase(name)) {
+ if (header.startsWith("filename=")) {
+ header = header.replaceFirst("filename=", "attachment; filename=");
+ }
+ }
+ return header;
+ }
+
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
index 2e093dbcf36..22b6ee05e48 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
@@ -19,10 +19,10 @@ import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
import org.apache.commons.lang3.StringUtils;
import org.swordapp.server.SwordEntry;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
index 4cbf31d1d2c..bb57059a99a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
@@ -1,17 +1,15 @@ package edu.harvard.iq.dataverse.api.errorhandlers;
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
-
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
import java.util.List;
import java.util.stream.Collectors;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
index 286272d9de3..2f974a1c5be 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
@@ -2,17 +2,14 @@
import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import javax.json.Json;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import 
javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; -import java.util.UUID; -import java.util.logging.Level; -import java.util.logging.Logger; +import jakarta.json.Json; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.ext.ExceptionMapper; +import jakarta.ws.rs.ext.Provider; /** * Make a failing JSON parsing request appear to be a BadRequest (error code 400) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java index 4064ee21474..8e43a1876bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java @@ -2,11 +2,11 @@ import edu.harvard.iq.dataverse.api.util.JsonResponseBuilder; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.ext.ExceptionMapper; +import jakarta.ws.rs.ext.Provider; import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java index 5f28bfd0afc..e67e91e63c9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java @@ -8,12 +8,12 @@ import edu.harvard.iq.dataverse.api.util.JsonResponseBuilder; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.ext.ExceptionMapper; +import jakarta.ws.rs.ext.Provider; import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java index fc96215cef0..2bea36a6047 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java @@ -7,14 +7,14 @@ package edu.harvard.iq.dataverse.api.imports; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import 
jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java index e7b8e71495b..240baeefcff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java @@ -5,9 +5,9 @@ */ package edu.harvard.iq.dataverse.api.imports; -import javax.ejb.Stateless; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 39b9ddbcf5f..8f7934dd528 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -22,15 +22,14 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLInputFactory; -import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index 57d7714ba77..f7a6cf54dd5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -11,7 +11,6 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.ForeignMetadataFieldMapping; import edu.harvard.iq.dataverse.ForeignMetadataFormatMapping; -import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.HandlenetServiceBean; import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.api.dto.*; @@ -32,19 +31,19 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; import javax.xml.stream.XMLInputFactory; import 
net.handle.hdllib.HandleException; import net.handle.hdllib.HandleResolver; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index cb6cef6ded5..bcb67b180c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -51,23 +51,23 @@ import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonReader; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonReader; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validation; +import jakarta.validation.Validator; +import jakarta.validation.ValidatorFactory; import javax.xml.stream.XMLStreamException; import org.apache.commons.lang3.StringUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java index aef17d1ab34..71a010b7e6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java @@ -2,14 +2,14 @@ import edu.harvard.iq.dataverse.api.ApiBlockingFilter; -import javax.json.Json; -import javax.json.JsonValue; -import javax.json.JsonObjectBuilder; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.json.Json; +import jakarta.json.JsonValue; +import jakarta.json.JsonObjectBuilder; +import jakarta.servlet.ServletResponse; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import org.apache.commons.lang3.exception.ExceptionUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java index 15d1cb07a11..a2cf3082ae7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java @@ -8,14 +8,14 @@ import java.util.logging.Level; import java.util.logging.Logger; import 
java.util.logging.SimpleFormatter; -import javax.inject.Inject; -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; +import jakarta.inject.Inject; +import jakarta.servlet.Filter; +import jakarta.servlet.FilterChain; +import jakarta.servlet.FilterConfig; +import jakarta.servlet.ServletException; +import jakarta.servlet.ServletRequest; +import jakarta.servlet.ServletResponse; +import jakarta.servlet.http.HttpServletRequest; public class AuthFilter implements Filter { diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java index 3715900733c..9cee3ec67c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java @@ -15,7 +15,7 @@ import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; -import javax.ejb.Stateless; +import jakarta.ejb.Stateless; import org.apache.commons.lang3.StringUtils; @Stateless diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java index 94a773bc977..3291dd2efbf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java @@ -2,17 +2,17 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import java.io.Serializable; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; -import javax.persistence.Table; -import javax.persistence.UniqueConstraint; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import jakarta.persistence.UniqueConstraint; /** * A somewhat glorified key-value pair, persisted in the database. 
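All of the entity hunks in this stretch follow a single mechanical pattern: the package prefix moves from javax.persistence to jakarta.persistence while every annotation keeps its name, attributes, and semantics. A minimal sketch of the post-migration shape (ExampleLookup is an illustrative class, not one from this codebase):

// Illustrative only: a minimal JPA entity under the Jakarta EE 10 namespace.
// The only difference from the pre-migration code is the import prefix.
import jakarta.persistence.Entity;          // was: javax.persistence.Entity
import jakarta.persistence.GeneratedValue;  // was: javax.persistence.GeneratedValue
import jakarta.persistence.Id;              // was: javax.persistence.Id

@Entity
public class ExampleLookup {

    @Id
    @GeneratedValue
    private Long id;

    private String value; // the "value" half of a key-value pair

    public Long getId() { return id; }
    public String getValue() { return value; }
    public void setValue(String value) { this.value = value; }
}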
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java index 6289865baf0..a93d01527a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java @@ -22,15 +22,15 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Lock; -import static javax.ejb.LockType.READ; -import static javax.ejb.LockType.WRITE; -import javax.ejb.Singleton; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Lock; +import static jakarta.ejb.LockType.READ; +import static jakarta.ejb.LockType.WRITE; +import jakarta.ejb.Singleton; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 9bf53116efa..106a83a4ad1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -6,31 +6,23 @@ import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.UserServiceBean; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException; -import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationProviderFactoryNotFoundException; -import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory; -import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider; -import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption; import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider; -import 
edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; -import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; @@ -44,7 +36,6 @@ import java.util.Calendar; import java.util.Collection; import java.util.Date; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -53,21 +44,21 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.persistence.TypedQuery; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.Validation; +import jakarta.validation.Validator; +import jakarta.validation.ValidatorFactory; /** * AuthenticationService is for general authentication-related operations. 
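AuthenticationServiceBean also picks up the renamed Bean Validation API. The bootstrap calls are unchanged apart from the package, so code like the following (a generic sketch, not a method lifted from the bean) migrates by swapping imports alone:

import java.util.Set;
import jakarta.validation.ConstraintViolation;
import jakarta.validation.Validation;
import jakarta.validation.Validator;
import jakarta.validation.ValidatorFactory;

public class ValidationSketch {
    // Validate any bean against its jakarta.validation.constraints annotations;
    // an empty result set means the bean passed all constraints.
    public static <T> Set<ConstraintViolation<T>> check(T bean) {
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        return factory.getValidator().validate(bean);
    }
}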
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java index 12ddf817221..ff1a5546f38 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java @@ -11,19 +11,19 @@ import java.util.MissingResourceException; import java.util.Objects; import java.util.Set; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; -import javax.validation.constraints.Pattern; -import javax.validation.constraints.Size; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; +import jakarta.validation.constraints.Pattern; +import jakarta.validation.constraints.Size; /** * A role is an annotated set of permissions. A role belongs diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java index 4e6b54a8d49..966247bce2e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java @@ -4,21 +4,17 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseRoleServiceBean; -import edu.harvard.iq.dataverse.authorization.DataverseRole; -import java.sql.Array; -import java.util.AbstractMap; + import java.util.ArrayList; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; /* diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java index 66293a4f781..a746eee0a60 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java @@ -26,11 +26,11 @@ import java.util.logging.Logger; import static java.util.stream.Collectors.toSet; import java.util.stream.Stream; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java index 52785d5c7e2..1ef3b01d752 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java @@ -3,14 +3,14 @@ import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * Convenience base class for implementing groups that apply to the entire Dataverse diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java index 43705a2240e..2723561d8b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java @@ -13,24 +13,24 @@ import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import javax.persistence.Column; -import javax.persistence.ElementCollection; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.PostLoad; -import javax.persistence.PrePersist; -import javax.persistence.Table; -import javax.persistence.Transient; -import javax.validation.constraints.Pattern; +import jakarta.persistence.Column; +import jakarta.persistence.ElementCollection; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.PostLoad; +import jakarta.persistence.PrePersist; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.Pattern; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java index b7c1b46b3a7..a688fac0e34 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java @@ -10,17 +10,16 @@ import java.util.List; import java.util.Set; import java.util.TreeSet; -import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import static java.util.stream.Collectors.joining; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; /** * A bean providing the {@link ExplicitGroupProvider}s with container services, diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java index a3231557898..038fbbfc6e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java @@ -12,12 +12,12 @@ import java.util.HashSet; import java.util.Objects; import java.util.Set; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.Transient; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.Transient; @NamedQueries({ @NamedQuery(name="IpGroup.findAll", diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java index c03cf26e11e..15282045b3a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java @@ -10,12 +10,12 @@ import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; /** * Provides CRUD tools to efficiently manage IP groups in a Java EE container. 
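The service beans above (and several below, e.g. BuiltinUserServiceBean) share the same container-managed CRUD shape, now expressed entirely in jakarta.* types. A hedged sketch of that pattern with hypothetical names (Example, ExampleServiceBean); note that a caught NoResultException must now resolve to jakarta.persistence, which is why fully-qualified javax.persistence.NoResultException catch clauses are also being cleaned up in this patch:

import jakarta.ejb.Stateless;
import jakarta.inject.Named;
import jakarta.persistence.Entity;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Id;
import jakarta.persistence.NoResultException;
import jakarta.persistence.PersistenceContext;

@Entity
class Example {
    @Id
    Long id;
    String name;
}

@Named
@Stateless
public class ExampleServiceBean {

    @PersistenceContext
    private EntityManager em;

    // Look up a single row by name; return null instead of propagating
    // the (jakarta.persistence) NoResultException on a miss.
    public Example findByName(String name) {
        try {
            return em.createQuery(
                    "SELECT e FROM Example e WHERE e.name = :name", Example.class)
                    .setParameter("name", name)
                    .getSingleResult();
        } catch (NoResultException nre) {
            return null;
        }
    }
}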
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java index 3ecd7689e1c..8694b7d455b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java @@ -1,13 +1,13 @@ package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip; import java.math.BigInteger; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * A range of IPv4 addresses. In order to make SQL querying efficient, the actual fields diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java index d1301d550c7..379c64a88cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java @@ -1,13 +1,13 @@ package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java index b71dbcd0eba..fc21397898f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java @@ -2,8 +2,8 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import java.util.Objects; -import javax.persistence.ManyToOne; -import javax.persistence.MappedSuperclass; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.MappedSuperclass; /** * A range of {@link IpAddress}es. 
Abstract class - to instantiate, you need to diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java index def11c57076..15b2fd1810c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java @@ -6,11 +6,11 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -import javax.persistence.Entity; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Transient; -import javax.validation.constraints.NotEmpty; +import jakarta.persistence.Entity; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.NotEmpty; ; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java index 58e72b7b575..b1b1e883705 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java @@ -9,14 +9,14 @@ import java.util.logging.Logger; import java.util.regex.Pattern; import java.util.stream.Collectors; -import javax.annotation.PostConstruct; -import javax.ejb.*; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.ws.rs.NotFoundException; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.*; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.ws.rs.NotFoundException; /** * A bean providing the {@link MailDomainGroupProvider}s with container services, such as database connectivity. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java index 79fda0ca7d7..30850f0fb20 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java @@ -1,17 +1,16 @@ package edu.harvard.iq.dataverse.authorization.groups.impl.shib; -import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo; import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.groups.GroupProvider; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Transient; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Transient; /** * Persistence for Shibboleth groups. diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java index c15e56ee7e0..7a7844b7c1e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java @@ -11,14 +11,14 @@ import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * @todo Consider merging this bean into the newer and more generic diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java index 6b9c545b7f9..2f37c777877 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java @@ -2,13 +2,13 @@ import edu.harvard.iq.dataverse.authorization.AuthenticationProvider; import java.util.Objects; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.Lob; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.Lob; +import jakarta.persistence.NamedQueries; +import 
jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; /** * Database-storable form of an {@code AuthenticationProvider}. diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java index c2510b8b043..2ce36997ea9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java @@ -5,20 +5,20 @@ import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import java.io.Serializable; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; -import javax.persistence.Table; -import javax.persistence.Transient; -import javax.validation.constraints.NotBlank; -import javax.validation.constraints.Size; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.Size; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java index c39c7cb2985..ffbc5d7a027 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java @@ -9,17 +9,17 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.Validation; +import jakarta.validation.Validator; +import jakarta.validation.ValidatorFactory; /** * @@ -88,7 +88,7 @@ public BuiltinUser findByUserName(String userName) { return em.createNamedQuery("BuiltinUser.findByUserName", BuiltinUser.class) .setParameter("userName", userName) .getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (NoResultException e) { return null; } catch (NonUniqueResultException ex) { 
logger.log(Level.WARNING, "multiple accounts found for username {0}", userName); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index 5c0f3a49f76..dc4644dfccd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -51,15 +51,15 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.event.ActionEvent; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.event.ActionEvent; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.lang3.StringUtils; import org.hibernate.validator.constraints.NotBlank; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java index 01139cd2e27..48efe1e2592 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java @@ -14,7 +14,7 @@ import edu.harvard.iq.dataverse.authorization.AuthenticationProviderDisplayInfo; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.validation.constraints.NotNull; +import jakarta.validation.constraints.NotNull; import java.io.IOException; import java.util.*; import java.util.concurrent.ExecutionException; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java index 54ba3ec6a05..821e8a5ea6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java @@ -30,14 +30,14 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.enterprise.context.SessionScoped; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.inject.Named; -import javax.inject.Inject; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.SessionScoped; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Named; +import jakarta.inject.Inject; import org.hibernate.validator.constraints.NotBlank; /** diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index c5be41a014a..99df2375a79 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -20,12 +20,12 @@ import java.util.logging.Level; import java.util.logging.Logger; import static java.util.stream.Collectors.toList; -import javax.ejb.EJB; -import javax.inject.Named; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.servlet.http.HttpServletRequest; -import javax.validation.constraints.NotNull; +import jakarta.ejb.EJB; +import jakarta.inject.Named; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.validation.constraints.NotNull; import static edu.harvard.iq.dataverse.util.StringUtil.toOption; import edu.harvard.iq.dataverse.util.SystemConfig; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java index a5ee5ddf537..59f659ff297 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java @@ -4,14 +4,14 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import java.io.Serializable; import java.sql.Timestamp; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; /** * Token data for a given user, received from an OAuth2 system. Contains the diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java index d8f1fa7600b..b1dcb6df8cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java @@ -2,10 +2,10 @@ import java.util.List; import java.util.Optional; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * CRUD for {@link OAuth2TokenData}. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java index 62f3cc382e2..8829a25336b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java @@ -9,9 +9,9 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.StringReader; import java.util.Collections; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonReader; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; /** * IDP adaptor for GitHub.com diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java index 1fa5470d551..a864ecb810a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java @@ -8,9 +8,9 @@ import java.io.StringReader; import java.util.Arrays; import java.util.UUID; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonReader; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java index da260a9fb0e..bd3caccc220 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java @@ -8,9 +8,9 @@ import java.util.Collections; import java.util.logging.Logger; import java.io.StringReader; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonReader; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java index 02177ee0032..089ca40e164 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java @@ -23,10 +23,10 @@ import static java.util.stream.Collectors.joining; import java.util.stream.IntStream; import java.util.stream.Stream; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.validation.constraints.NotNull; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.validation.constraints.NotNull; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java index 3e986a15689..0921b2c6683 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java @@ -4,17 +4,11 @@ import com.google.gson.JsonElement; import com.google.gson.JsonParser; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; -import edu.harvard.iq.dataverse.authorization.AuthenticationRequest; -import edu.harvard.iq.dataverse.authorization.AuthenticationResponse; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; -import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException; -import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; -import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; import java.io.InputStream; @@ -25,11 +19,11 @@ import java.util.Map; import java.util.UUID; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; @Named @Stateless diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java index f8b30710656..fff135e0dec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java @@ -12,7 +12,7 @@ import java.util.Map; import java.util.UUID; import java.util.logging.Logger; -import javax.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletRequest; public class ShibUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java index fc7ed8a9060..0de7d7754a1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java @@ -2,18 +2,18 @@ import java.io.Serializable; import java.sql.Timestamp; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.Table; -import javax.validation.constraints.NotNull; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import 
jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; @Entity @NamedQueries({ diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 9fdfce2f1a7..89429b912f6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -23,22 +23,22 @@ import java.util.Objects; import java.util.Set; -import javax.json.Json; -import javax.json.JsonObjectBuilder; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.PostLoad; -import javax.persistence.PrePersist; -import javax.persistence.Transient; -import javax.validation.constraints.NotNull; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.PostLoad; +import jakarta.persistence.PrePersist; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.NotNull; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java b/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java index be2167fa4d5..debece131d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java @@ -1,10 +1,10 @@ package edu.harvard.iq.dataverse.batch.entities; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.StepExecution; +import jakarta.batch.operations.JobOperator; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.batch.runtime.BatchStatus; +import jakarta.batch.runtime.JobExecution; +import jakarta.batch.runtime.StepExecution; import java.util.ArrayList; import java.util.Date; import java.util.LinkedHashMap; diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java b/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java index 65ed3f32e1b..ba20386ed07 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java @@ -1,8 +1,8 @@ package edu.harvard.iq.dataverse.batch.entities; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.Metric; -import javax.batch.runtime.StepExecution; +import jakarta.batch.runtime.BatchStatus; +import jakarta.batch.runtime.Metric; +import 
jakarta.batch.runtime.StepExecution; import java.util.Date; import java.util.HashMap; import java.util.Map; diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java index a5ba9a00bd2..593a5cbfdc3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java @@ -40,22 +40,22 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; -import javax.batch.api.BatchProperty; -import javax.batch.api.chunk.listener.ItemReadListener; -import javax.batch.api.listener.JobListener; -import javax.batch.api.listener.StepListener; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.StepExecution; -import javax.batch.runtime.context.JobContext; -import javax.batch.runtime.context.StepContext; -import javax.ejb.EJB; -import javax.enterprise.context.Dependent; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.batch.api.BatchProperty; +import jakarta.batch.api.chunk.listener.ItemReadListener; +import jakarta.batch.api.listener.JobListener; +import jakarta.batch.api.listener.StepListener; +import jakarta.batch.operations.JobOperator; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.batch.runtime.BatchStatus; +import jakarta.batch.runtime.JobExecution; +import jakarta.batch.runtime.StepExecution; +import jakarta.batch.runtime.context.JobContext; +import jakarta.batch.runtime.context.StepContext; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; import edu.harvard.iq.dataverse.settings.JvmSettings; import org.apache.commons.io.IOUtils; @@ -74,8 +74,8 @@ import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.Logger; -import javax.batch.operations.JobSecurityException; -import javax.batch.operations.NoSuchJobExecutionException; +import jakarta.batch.operations.JobSecurityException; +import jakarta.batch.operations.NoSuchJobExecutionException; @Named @Dependent diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java index af7caf32a7c..e5db80b9aa6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java @@ -25,15 +25,15 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; -import javax.annotation.PostConstruct; -import javax.batch.api.chunk.ItemProcessor; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.context.JobContext; -import javax.ejb.EJB; -import javax.enterprise.context.Dependent; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.annotation.PostConstruct; +import jakarta.batch.api.chunk.ItemProcessor; 
+import jakarta.batch.operations.JobOperator; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.batch.runtime.context.JobContext; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Inject; +import jakarta.inject.Named; import java.io.File; import java.util.Properties; import java.util.logging.Level; diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java index a4f8ffd2378..fb702c21df2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java @@ -28,17 +28,17 @@ import org.apache.commons.io.filefilter.NotFileFilter; import org.apache.commons.io.filefilter.WildcardFileFilter; -import javax.annotation.PostConstruct; -import javax.batch.api.BatchProperty; -import javax.batch.api.chunk.AbstractItemReader; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.context.JobContext; -import javax.batch.runtime.context.StepContext; -import javax.ejb.EJB; -import javax.enterprise.context.Dependent; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.annotation.PostConstruct; +import jakarta.batch.api.BatchProperty; +import jakarta.batch.api.chunk.AbstractItemReader; +import jakarta.batch.operations.JobOperator; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.batch.runtime.context.JobContext; +import jakarta.batch.runtime.context.StepContext; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Inject; +import jakarta.inject.Named; import java.io.File; import java.io.FileFilter; import java.io.Serializable; diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java index 195ed57bd43..ba34a3d1ed1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java @@ -36,17 +36,17 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; -import javax.annotation.PostConstruct; -import javax.batch.api.BatchProperty; -import javax.batch.api.chunk.AbstractItemWriter; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.context.JobContext; -import javax.batch.runtime.context.StepContext; -import javax.ejb.EJB; -import javax.enterprise.context.Dependent; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.annotation.PostConstruct; +import jakarta.batch.api.BatchProperty; +import jakarta.batch.api.chunk.AbstractItemWriter; +import jakarta.batch.operations.JobOperator; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.batch.runtime.context.JobContext; +import jakarta.batch.runtime.context.StepContext; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Inject; +import jakarta.inject.Named; import java.io.File; import java.io.Serializable; import java.sql.Timestamp; @@ -57,7 +57,7 @@ import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; -import 
javax.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletRequest; import edu.harvard.iq.dataverse.GlobalIdServiceBean; @Named diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java index a2f76ca953d..19d1112ba54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java @@ -25,7 +25,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import org.apache.commons.io.FileUtils; -import javax.batch.runtime.JobExecution; +import jakarta.batch.runtime.JobExecution; import java.io.File; import java.io.IOException; import java.text.SimpleDateFormat; diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java index 3cb071fe03f..c230229abf9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java @@ -6,7 +6,7 @@ import java.util.Arrays; import java.util.logging.Logger; -import javax.mail.internet.InternetAddress; +import jakarta.mail.internet.InternetAddress; public class BrandingUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java index 274970f8b8e..7729ab4763e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java @@ -1,9 +1,9 @@ package edu.harvard.iq.dataverse.branding; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.ejb.Startup; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java index c05750c13e6..0ad9ab59f4b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java @@ -5,17 +5,17 @@ import java.sql.Timestamp; import java.util.Date; import java.util.UUID; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java index 07aea0d5011..b76e3db1379 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java @@ -7,10 +7,10 @@ import edu.harvard.iq.dataverse.util.JsfHelper; import java.util.Arrays; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java index e1053c3a93f..a54fd6bb0c1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.confirmemail; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -16,13 +15,13 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java index 54e3114a0ae..6fc80312bf5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java @@ -8,7 +8,6 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionsWrapper; import edu.harvard.iq.dataverse.SettingsWrapper; -import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -22,16 +21,16 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import 
jakarta.persistence.PersistenceContext; +import jakarta.servlet.http.HttpServletRequest; @ViewScoped @Named("DashboardDatamovePage") diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java index 5b5a21e21bf..477e4c0fdd6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java @@ -24,10 +24,10 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; @ViewScoped @Named("DashboardUsersPage") diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index 8ee3f0cf53c..d95df1567bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -565,21 +565,26 @@ private String getDatasetDirectory() throws IOException { if (isDirectAccess()) { throw new IOException("No DvObject defined in the Data Access Object"); } - - Path datasetDirectoryPath=null; + String authorityForFS = null; + String identifierForFS = null; if (dvObject instanceof Dataset) { - datasetDirectoryPath = Paths.get(this.getDataset().getAuthorityForFileStorage(), this.getDataset().getIdentifierForFileStorage()); + authorityForFS = this.getDataset().getAuthorityForFileStorage(); + identifierForFS = this.getDataset().getIdentifierForFileStorage(); } else if (dvObject instanceof DataFile) { - datasetDirectoryPath = Paths.get(this.getDataFile().getOwner().getAuthorityForFileStorage(), this.getDataFile().getOwner().getIdentifierForFileStorage()); + authorityForFS = this.getDataFile().getOwner().getAuthorityForFileStorage(); + identifierForFS = this.getDataFile().getOwner().getIdentifierForFileStorage(); } else if (dvObject instanceof Dataverse) { throw new IOException("FileAccessIO: Dataverses are not a supported dvObject"); } - - if (datasetDirectoryPath == null) { + + if (authorityForFS == null || identifierForFS == null) { throw new IOException("Could not determine the filesystem directory of the parent dataset."); } - String datasetDirectory = Paths.get(getFilesRootDirectory(), datasetDirectoryPath.toString()).toString(); + + // Determine the final directory tree. As of JDK 16, the first component of the path MUST be non-null + // (we check for that via the setting), but also the others make no sense if they are null. 
+ String datasetDirectory = Paths.get(getFilesRootDirectory(), authorityForFS, identifierForFS).toString(); if (dvObject.getStorageIdentifier() == null || dvObject.getStorageIdentifier().isEmpty()) { throw new IOException("Data Access: No local storage identifier defined for this datafile."); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index f6bca84941e..822ada0b83e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -69,9 +69,9 @@ import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; -import javax.json.Json; -import javax.json.JsonObjectBuilder; -import javax.validation.constraints.NotNull; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import jakarta.validation.constraints.NotNull; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java index c33b4e0fc71..bf5d4a0d6ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java @@ -6,8 +6,8 @@ import com.mashape.unirest.http.Unirest; import com.mashape.unirest.http.exceptions.UnirestException; import java.io.Serializable; -import javax.ejb.Stateless; -import javax.inject.Named; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; /** * This class contains all the methods that have external runtime dependencies diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java index 1aa384d205e..460e4727afc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java @@ -8,9 +8,9 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.Arrays; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; public class DataCaptureModuleUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 4c2510b6ccb..98d5afc47e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -17,7 +17,6 @@ import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.api.ApiConstants; import edu.harvard.iq.dataverse.api.Util; -import edu.harvard.iq.dataverse.api.Files; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; @@ -48,23 +47,22 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJBException; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonNumber; -import 
javax.json.JsonObject; -import javax.json.JsonArray; -import javax.json.JsonObjectBuilder; -import javax.validation.ConstraintViolation; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJBException; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonNumber; +import jakarta.json.JsonObject; +import jakarta.json.JsonArray; +import jakarta.json.JsonObjectBuilder; +import jakarta.validation.ConstraintViolation; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import org.apache.commons.io.IOUtils; -import org.ocpsoft.common.util.Strings; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; /** * Methods to add or replace a single file. @@ -1217,7 +1215,7 @@ private boolean step_030_createNewFilesViaIngest(){ initialFileList = result.getDataFiles(); } catch (IOException ex) { - if (!Strings.isNullOrEmpty(ex.getMessage())) { + if (ex.getMessage() != null && !ex.getMessage().isEmpty()) { this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage()); } else { this.addErrorSevere(getBundleErr("ingest_create_file_err")); @@ -2152,7 +2150,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { } } - catch ( javax.json.stream.JsonParsingException ex) { + catch ( jakarta.json.stream.JsonParsingException ex) { ex.printStackTrace(); return error(BAD_REQUEST, "Json Parsing Exception :" + ex.getMessage()); } @@ -2321,7 +2319,7 @@ public Response replaceFiles(String jsonData, Dataset ds, User authUser) { } } - catch ( javax.json.stream.JsonParsingException ex) { + catch ( jakarta.json.stream.JsonParsingException ex) { ex.printStackTrace(); return error(BAD_REQUEST, "Json Parsing Exception :" + ex.getMessage()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java index 5e03899b790..a9b1694d842 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java @@ -1,13 +1,13 @@ package edu.harvard.iq.dataverse.datavariable; -import javax.persistence.Index; -import javax.persistence.Entity; -import javax.persistence.Table; -import javax.persistence.GenerationType; -import javax.persistence.GeneratedValue; -import javax.persistence.Id; -import javax.persistence.ManyToOne; -import javax.persistence.JoinColumn; +import jakarta.persistence.Index; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import jakarta.persistence.GenerationType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.JoinColumn; @Entity @Table(indexes = {@Index(columnList="category_id"), @Index(columnList="variablemetadata_id")}) diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java index 6462f690cac..b2e9441a163 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java @@ -8,22 +8,22 @@ import java.io.Serializable; import java.util.Collection; -import javax.persistence.CascadeType; -import 
javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; import org.hibernate.validator.constraints.NotBlank; import edu.harvard.iq.dataverse.DataTable; import java.util.ArrayList; import java.util.List; -import javax.persistence.Column; -import javax.persistence.Index; -import javax.persistence.OrderBy; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Index; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java index bf81aff3e2b..6896ef360ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java @@ -7,14 +7,14 @@ package edu.harvard.iq.dataverse.datavariable; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; /* * diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java index 242110e333f..b52c76930d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java @@ -1,14 +1,14 @@ package edu.harvard.iq.dataverse.datavariable; -import javax.persistence.Entity; -import javax.persistence.Table; -import javax.persistence.Index; -import javax.persistence.Id; -import javax.persistence.ManyToOne; -import javax.persistence.JoinColumn; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import jakarta.persistence.Index; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Column; import java.util.HashSet; import java.util.Set; diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java index 6a3e702a561..5ccef82b5d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java @@ -8,16 +8,16 @@ import 
java.io.Serializable; import java.util.List; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; import edu.harvard.iq.dataverse.util.AlphaNumericComparator; -import javax.persistence.Index; -import javax.persistence.Table; +import jakarta.persistence.Index; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java index c18355c9979..29e821c28a4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java @@ -2,18 +2,18 @@ import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.Table; -import javax.persistence.Index; -import javax.persistence.UniqueConstraint; -import javax.persistence.Id; -import javax.persistence.ManyToOne; -import javax.persistence.JoinColumn; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Column; -import javax.persistence.OneToMany; -import javax.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import jakarta.persistence.Index; +import jakarta.persistence.UniqueConstraint; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Column; +import jakarta.persistence.OneToMany; +import jakarta.persistence.CascadeType; import java.util.Collection; import java.util.ArrayList; diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java index 17098e6af54..eb04eac846b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java @@ -7,14 +7,14 @@ package edu.harvard.iq.dataverse.datavariable; import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java index 81db4225515..d5f99f7e016 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java @@ -8,14 +8,14 @@ import java.io.Serializable; import 
java.math.BigDecimal; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java index 8287d1c7041..9fb4a3fd34c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java @@ -7,12 +7,12 @@ package edu.harvard.iq.dataverse.datavariable; import java.util.List; -import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; + +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * @@ -105,7 +105,7 @@ public VariableFormatType findVariableFormatTypeByName(String name) { VariableFormatType type = null; try { type = (VariableFormatType)query.getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (jakarta.persistence.NoResultException e) { // DO nothing, just return null. } return type; @@ -116,7 +116,7 @@ public VariableIntervalType findVariableIntervalTypeByName(String name) { VariableIntervalType type = null; try { type=(VariableIntervalType)em.createQuery(query).getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (jakarta.persistence.NoResultException e) { // DO nothing, just return null. } return type; @@ -127,7 +127,7 @@ public SummaryStatisticType findSummaryStatisticTypeByName(String name) { SummaryStatisticType type = null; try { type = (SummaryStatisticType) em.createQuery(query).getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (jakarta.persistence.NoResultException e) { // DO nothing, just return null. 
} return type; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java index 52a9501aefb..7964c56835e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java @@ -16,7 +16,7 @@ import java.util.Map; import java.util.logging.Logger; -import javax.ws.rs.BadRequestException; +import jakarta.ws.rs.BadRequestException; import opennlp.tools.util.StringUtil; import org.apache.commons.io.FileUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java index ef5b9dba407..55a375acb6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java @@ -42,7 +42,7 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; import java.util.Stack; -import javax.persistence.EntityManager; +import jakarta.persistence.EntityManager; /** * An interface for accessing Dataverse's resources, user info etc. Used by the diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java index 05be37802f0..d792b616a0c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.engine.command; import edu.harvard.iq.dataverse.api.AbstractApiBean; -import edu.harvard.iq.dataverse.api.batchjob.FileRecordJobResource; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -11,7 +10,7 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletRequest; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index 36fe3ea7bc1..6061461306d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -14,7 +14,6 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.sql.Timestamp; import java.util.Date; @@ -23,14 +22,10 @@ import java.util.logging.Logger; import static java.util.stream.Collectors.joining; -import javax.json.JsonObject; -import javax.servlet.http.HttpServletRequest; -import javax.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolation; import edu.harvard.iq.dataverse.GlobalIdServiceBean; import edu.harvard.iq.dataverse.MetadataBlock; -import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; -import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean; import 
edu.harvard.iq.dataverse.settings.JvmSettings; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java index 8ba1d181609..59c5d970b09 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java @@ -16,7 +16,7 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJBException; +import jakarta.ejb.EJBException; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java index 4a5998aea00..94aff3e3f5d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java @@ -18,11 +18,11 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.List; import java.util.Set; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; -import javax.ws.rs.core.Response; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.Validation; +import jakarta.validation.Validator; +import jakarta.validation.ValidatorFactory; +import jakarta.ws.rs.core.Response; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java index cb9b0a3c774..8cffcd3d821 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -13,7 +12,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.persistence.NoResultException; +import jakarta.persistence.NoResultException; /** * Create a new role in a dataverse. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java index 147e1870566..7a549a51dd5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java @@ -12,7 +12,7 @@ import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.JsonObjectBuilder; +import jakarta.json.JsonObjectBuilder; @RequiredPermissions(Permission.PublishDataverse) public class CreateSavedSearchCommand extends AbstractCommand { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java index f23033f09fa..594d4fe25ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java @@ -34,10 +34,10 @@ import java.util.Set; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; import javax.net.ssl.SSLContext; import org.apache.commons.codec.digest.DigestUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index 2ca73af3b3c..d6d7b49d172 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.codec.binary.Hex; import org.duracloud.client.ContentStore; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 253c761f0c3..f5e70209744 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -37,8 +37,8 @@ import java.util.concurrent.Future; import org.apache.solr.client.solrj.SolrServerException; -import javax.ejb.EJB; -import javax.inject.Inject; +import jakarta.ejb.EJB; +import jakarta.inject.Inject; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java index 23f08aadd3e..2de2adff099 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java @@ -12,9 +12,9 @@ import java.io.IOException; import java.io.InputStream; import java.util.logging.Logger; -import 
javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonReader; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; @RequiredPermissions(Permission.EditDataset) public class GetProvJsonCommand extends AbstractCommand { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java index f3324ba6f2e..e41d70d9804 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java @@ -17,14 +17,14 @@ import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; -import java.math.BigDecimal; + import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; // Superuser-only enforced below. @RequiredPermissions({}) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java index da2701a41e7..512987866d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java @@ -19,8 +19,8 @@ import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; import org.apache.commons.codec.binary.Hex; -import javax.json.Json; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import java.io.File; import java.io.FileInputStream; import java.io.IOException; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java index 5f31ea756eb..c03c77d42fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java @@ -14,12 +14,12 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.JvmSettings; -import javax.batch.operations.JobOperator; -import javax.batch.operations.JobSecurityException; -import javax.batch.operations.JobStartException; -import javax.batch.runtime.BatchRuntime; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; +import jakarta.batch.operations.JobOperator; +import jakarta.batch.operations.JobSecurityException; +import jakarta.batch.operations.JobStartException; +import jakarta.batch.runtime.BatchRuntime; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; import java.io.File; import java.util.Properties; import java.util.logging.Level; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java index 1c63a1a3c4f..55fe96556a5 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java @@ -23,8 +23,8 @@ import java.util.Arrays; import java.util.Collections; import java.util.Date; -import java.util.concurrent.Future; -import javax.ws.rs.core.Response; + +import jakarta.ws.rs.core.Response; import org.apache.solr.client.solrj.SolrServerException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java index c7e91b2967b..d2f061b6e70 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java @@ -17,8 +17,8 @@ import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import java.io.File; import java.io.FileOutputStream; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java index bdb6ceffd6d..b9346a43af8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java @@ -20,7 +20,7 @@ import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.util.logging.Logger; -import javax.ejb.EJBException; +import jakarta.ejb.EJBException; @RequiredPermissions(Permission.EditDataset) public class RedetectFileTypeCommand extends AbstractCommand { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java index f24d956e9d7..f02edd54b86 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java @@ -20,9 +20,9 @@ import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java index 29180f65e36..f2b89746160 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java @@ -22,11 +22,11 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.util.FileUtil; -import edu.harvard.iq.dataverse.util.StringUtil; + import java.io.IOException; import java.util.Collections; import java.util.logging.Logger; -import javax.persistence.Query; +import jakarta.persistence.Query; /** * diff --git 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 5f4ac24c1d9..7591bebe796 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -17,7 +17,7 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.validation.ConstraintViolationException; +import jakarta.validation.ConstraintViolationException; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 218b0ea89d9..56c76f04c05 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -6,7 +6,7 @@ import edu.harvard.iq.dataverse.Dataverse.DataverseType; import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.batch.util.LoggingUtil; + import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -14,14 +14,11 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.search.IndexResponse; -import java.io.IOException; + import java.util.ArrayList; import java.util.List; -import java.util.concurrent.Future; import java.util.logging.Logger; -import javax.persistence.TypedQuery; -import org.apache.solr.client.solrj.SolrServerException; +import jakarta.persistence.TypedQuery; /** * Update an existing dataverse. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java index 8df17e71ae6..f82c0d9ad3d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java @@ -12,7 +12,7 @@ import java.util.Locale; import java.util.Optional; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLStreamException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java index 59ff539af37..5119b4b96c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java @@ -33,14 +33,14 @@ import java.util.logging.Logger; import java.util.logging.Level; import java.io.OutputStream; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import javax.xml.stream.XMLStreamWriter; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLOutputFactory; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java index 66a9ea6c665..d48ce3a537d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java @@ -2,7 +2,6 @@ package edu.harvard.iq.dataverse.export; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; @@ -13,7 +12,7 @@ import java.util.Locale; import java.util.Optional; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import javax.xml.stream.XMLOutputFactory; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java index 04b7892d737..0fa32dd4bfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java @@ -2,7 +2,6 @@ package edu.harvard.iq.dataverse.export; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.export.dublincore.DublinCoreExportUtil; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; @@ -13,7 +12,7 @@ import java.util.Locale; import java.util.Optional; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLStreamException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java index 
eed84a19a66..8342e7df92a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java @@ -9,7 +9,6 @@ import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; import io.gdcc.spi.export.Exporter; import io.gdcc.spi.export.XMLExporter; @@ -47,7 +46,7 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MediaType; import org.apache.commons.io.IOUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java index 1b449060b17..9d0b107299e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java @@ -1,23 +1,18 @@ package edu.harvard.iq.dataverse.export; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; import io.gdcc.spi.export.Exporter; import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.json.JsonObject; -import javax.ws.rs.core.MediaType; +import jakarta.json.JsonObject; +import jakarta.ws.rs.core.MediaType; import javax.xml.stream.XMLStreamException; -import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.Locale; import java.util.Optional; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java index 7c76c4972a8..a7967f6ccb6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java @@ -3,11 +3,11 @@ import java.io.InputStream; import java.util.Optional; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; import edu.harvard.iq.dataverse.DOIDataCiteRegisterService; import edu.harvard.iq.dataverse.DataCitation; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java index 30347017071..a54e61c7c1e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java @@ -2,7 +2,6 @@ package edu.harvard.iq.dataverse.export; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.DatasetVersion; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; import io.gdcc.spi.export.Exporter; @@ -11,8 +10,8 @@ import java.util.Locale; import java.util.Optional; -import javax.json.JsonObject; -import 
javax.ws.rs.core.MediaType; +import jakarta.json.JsonObject; +import jakarta.ws.rs.core.MediaType; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java index de97a88d1fa..0b4121c6025 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java @@ -2,7 +2,6 @@ package edu.harvard.iq.dataverse.export; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; @@ -13,7 +12,7 @@ import java.util.Locale; import java.util.Optional; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLStreamException; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java index 06fee32e220..feec4403570 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java @@ -1,21 +1,18 @@ package edu.harvard.iq.dataverse.export; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.DatasetVersion; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; import io.gdcc.spi.export.Exporter; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.bagit.OREMap; + import java.io.OutputStream; import java.util.Locale; import java.util.Optional; -import java.util.ResourceBundle; import java.util.logging.Logger; -import javax.json.JsonObject; -import javax.ws.rs.core.MediaType; +import jakarta.json.JsonObject; +import jakarta.ws.rs.core.MediaType; @AutoService(Exporter.class) public class OAI_OREExporter implements Exporter { diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java index 03b8ebfa76f..8bd4ae6a042 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java @@ -3,12 +3,11 @@ import java.io.OutputStream; import java.util.Locale; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLStreamException; import com.google.auto.service.AutoService; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.export.openaire.OpenAireExportUtil; import io.gdcc.spi.export.ExportDataProvider; import io.gdcc.spi.export.ExportException; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java index f3be1e83d2d..5428715b905 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java @@ -9,7 +9,7 @@ import java.io.OutputStream; import java.util.Locale; import java.util.logging.Logger; -import javax.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MediaType; /** * Schema.org JSON-LD is used by Google Dataset Search and other services to diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java 
b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 8f2aa5d023a..24449e8010a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -3,26 +3,13 @@ import com.google.gson.Gson; import edu.harvard.iq.dataverse.ControlledVocabularyValue; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataTable; import edu.harvard.iq.dataverse.DatasetFieldConstant; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DvObjectContainer; -import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.api.dto.FileDTO; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; -import edu.harvard.iq.dataverse.datavariable.VariableMetadata; -import edu.harvard.iq.dataverse.datavariable.DataVariable; -import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; -import edu.harvard.iq.dataverse.datavariable.VariableRange; -import edu.harvard.iq.dataverse.datavariable.SummaryStatistic; -import edu.harvard.iq.dataverse.datavariable.VariableCategory; -import edu.harvard.iq.dataverse.datavariable.VarGroup; -import edu.harvard.iq.dataverse.datavariable.CategoryMetadata; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG; @@ -34,43 +21,36 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; -import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.xml.XmlPrinter; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; import java.time.LocalDate; import java.util.*; import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonString; -import javax.json.JsonValue; +import jakarta.ejb.EJB; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.FactoryConfigurationError; import javax.xml.parsers.ParserConfigurationException; import org.xml.sax.SAXException; -import org.xml.sax.SAXParseException; import org.w3c.dom.Document; import org.apache.commons.lang3.StringUtils; -import org.w3c.dom.DOMException; // For write operation import javax.xml.transform.Transformer; @@ -80,9 +60,7 @@ import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamSource; import javax.xml.transform.stream.StreamResult; -import java.io.File; import java.io.InputStream; -import 
java.io.InputStreamReader; public class DdiExportUtil { @@ -2086,7 +2064,7 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) if (fileJson.containsKey("tabularTags")) { JsonArray tags = fileJson.getJsonArray("tabularTags"); - for (int j = 0; j < tags.size(); i++) { + for (int j = 0; j < tags.size(); j++) { xmlw.writeStartElement("notes"); writeAttribute(xmlw, "level", LEVEL_FILE); writeAttribute(xmlw, "type", NOTE_TYPE_TAG); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java index 149c6791a7e..d0cd8a4cae7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java @@ -1,9 +1,9 @@ package edu.harvard.iq.dataverse.export.ddi; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.ejb.Startup; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java index 238cea78fb5..6b7cb844f3e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java @@ -14,18 +14,16 @@ import edu.harvard.iq.dataverse.api.dto.LicenseDTO; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil; -import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; -import java.io.ByteArrayOutputStream; + import java.io.OutputStream; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 6e1e61ebd06..f7e75ca03fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -8,7 +8,7 @@ import java.util.Set; import java.util.logging.Logger; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; @@ -21,7 +21,6 @@ import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.HandlenetServiceBean; -import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; @@ -31,8 +30,8 @@ import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.util.regex.Matcher; import java.util.regex.Pattern; -import 
javax.mail.internet.AddressException; -import javax.mail.internet.InternetAddress; +import jakarta.mail.internet.AddressException; +import jakarta.mail.internet.InternetAddress; public class OpenAireExportUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java index 0a238eb5198..7f1f46c06cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java @@ -6,20 +6,20 @@ import java.io.Serializable; import java.util.Arrays; import java.util.List; -import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.OneToMany; + +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToMany; /** * A specification or definition for how an external tool is intended to @@ -148,7 +148,8 @@ public enum Type { EXPLORE("explore"), CONFIGURE("configure"), - PREVIEW("preview"); + PREVIEW("preview"), + QUERY("query"); private final String text; @@ -238,14 +239,30 @@ public void setExternalToolTypes(List externalToolTypes) { } public boolean isExploreTool() { - boolean isExploreTool = false; for (ExternalToolType externalToolType : externalToolTypes) { if (externalToolType.getType().equals(Type.EXPLORE)) { - isExploreTool = true; - break; + return true; + } + } + return false; + } + + public boolean isQueryTool() { + for (ExternalToolType externalToolType : externalToolTypes) { + if (externalToolType.getType().equals(Type.QUERY)) { + return true; + } + } + return false; + } + + public boolean isPreviewTool() { + for (ExternalToolType externalToolType : externalToolTypes) { + if (externalToolType.getType().equals(Type.PREVIEW)) { + return true; } } - return isExploreTool; + return false; } public Scope getScope() { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index dac046373ba..a52679deebc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -22,15 +22,15 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonNumber; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonString; -import javax.json.JsonValue; -import javax.ws.rs.HttpMethod; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import 
jakarta.json.JsonNumber; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.ws.rs.HttpMethod; import org.apache.commons.codec.binary.StringUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java index f38cd7301ee..e13843eadfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java @@ -6,34 +6,31 @@ import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.externaltools.ExternalTool.Type; -import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.URLTokenUtil.ReservedWord; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.externaltools.ExternalTool.Scope; -import java.io.StringReader; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonReader; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonReader; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; import static edu.harvard.iq.dataverse.externaltools.ExternalTool.*; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.ejb.EJB; -import javax.json.JsonValue; + +import jakarta.ejb.EJB; +import jakarta.json.JsonValue; @Stateless @Named @@ -62,7 +59,7 @@ public List findDatasetToolsByType(Type type) { } /** - * @param type explore, configure or preview + * @param type explore, configure or preview, query * @return A list of tools or an empty list. */ public List findFileToolsByType(Type type) { @@ -71,7 +68,7 @@ public List findFileToolsByType(Type type) { } /** - * @param type explore, configure or preview + * @param type explore, configure or preview, query * @param contentType file content type (MIME type) * @return A list of tools or an empty list. 
*/ diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java index 3564d1871b5..fb4c0f5dc5d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java @@ -1,18 +1,18 @@ package edu.harvard.iq.dataverse.externaltools; import java.io.Serializable; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; @Entity @Table(indexes = { diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java index c9acb491aa2..c1162eb8db6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java +++ b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse.feedback; -import javax.json.Json; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java b/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java index 750a3923806..6ae0e165141 100644 --- a/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java @@ -17,8 +17,8 @@ import java.util.List; import java.util.logging.Logger; -import javax.json.JsonObject; -import javax.mail.internet.InternetAddress; +import jakarta.json.JsonObject; +import jakarta.mail.internet.InternetAddress; public class FeedbackUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java b/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java index 71b53bd43f2..39bc46216ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java +++ b/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java @@ -2,12 +2,12 @@ import org.flywaydb.core.Flyway; -import javax.annotation.PostConstruct; -import javax.annotation.Resource; -import javax.ejb.Singleton; -import javax.ejb.Startup; -import javax.ejb.TransactionManagement; -import javax.ejb.TransactionManagementType; +import jakarta.annotation.PostConstruct; +import jakarta.annotation.Resource; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; +import jakarta.ejb.TransactionManagement; +import jakarta.ejb.TransactionManagementType; import javax.sql.DataSource; @Startup diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 9d80c5cc280..d2613422be9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -4,19 +4,19 @@ import com.google.gson.GsonBuilder; import edu.harvard.iq.dataverse.*; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Inject; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonPatch; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonPatch; +import jakarta.servlet.http.HttpServletRequest; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java index 50d06807a13..ba6f5c3dec2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java @@ -7,14 +7,14 @@ import java.io.Serializable; import java.util.Date; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java index 40bd45ecb30..20884e3360c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java @@ -21,13 +21,13 @@ import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.Resource; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.ejb.Timer; -import javax.inject.Named; +import jakarta.annotation.Resource; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.ejb.Timer; +import jakarta.inject.Named; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.commons.lang3.mutable.MutableBoolean; @@ -51,8 +51,8 @@ import 
java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.Path; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * @@ -69,7 +69,7 @@ public class HarvesterServiceBean { @EJB DatasetServiceBean datasetService; @Resource - javax.ejb.TimerService timerService; + jakarta.ejb.TimerService timerService; @EJB DataverseTimerServiceBean dataverseTimerService; @EJB @@ -148,12 +148,12 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId String logTimestamp = logFormatter.format(new Date()); Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean." + harvestingClientConfig.getName() + logTimestamp); - String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log"; + String logFileName = System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log"; FileHandler fileHandler = new FileHandler(logFileName); hdLogger.setUseParentHandlers(false); hdLogger.addHandler(fileHandler); - PrintWriter importCleanupLog = new PrintWriter(new FileWriter( "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp+".txt")); + PrintWriter importCleanupLog = new PrintWriter(new FileWriter(System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt")); List harvestedDatasetIds = new ArrayList<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index d27ddc41b7f..40db55f2a0c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -17,25 +17,25 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.OneToOne; -import javax.persistence.OrderBy; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import javax.validation.constraints.Pattern; -import javax.validation.constraints.Size; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.OrderBy; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import 
jakarta.validation.constraints.Pattern; +import jakarta.validation.constraints.Size; import org.hibernate.validator.constraints.NotBlank; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java index 13cc44ce919..7ec6d75a41c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java @@ -5,25 +5,23 @@ import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.DeleteHarvestingClientCommand; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java index 49e40e786ea..94753d8594d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java @@ -21,12 +21,12 @@ import java.io.Serializable; import java.util.Date; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java index e73dbb9fc07..1b4a7bc7db0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java @@ -13,27 +13,22 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.time.Instant; -import java.io.File; -import java.io.IOException; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; import java.util.Collection; import 
java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; -import javax.persistence.TemporalType; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.TemporalType; /** * @@ -265,7 +260,7 @@ public OAIRecord findOAIRecordBySetNameandGlobalId(String setName, String global try { oaiRecord = (OAIRecord) query.setMaxResults(1).getSingleResult(); - } catch (javax.persistence.NoResultException e) { + } catch (jakarta.persistence.NoResultException e) { // Do nothing, just return null. } logger.fine("returning oai record."); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java index 038bb66de32..8d6b04effef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java @@ -20,16 +20,16 @@ package edu.harvard.iq.dataverse.harvest.server; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.OneToOne; -import javax.persistence.Version; -import javax.validation.constraints.Pattern; -import javax.validation.constraints.Size; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Version; +import jakarta.validation.constraints.Pattern; +import jakarta.validation.constraints.Size; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java index 6b28c8808a0..2bd666401c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java @@ -17,19 +17,17 @@ import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import org.apache.solr.client.solrj.SolrClient; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Named; +import 
jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException; +import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index 9cf1629abfc..96a19acc0e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -38,17 +38,16 @@ import java.io.IOException; -import java.time.Instant; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.inject.Inject; +import jakarta.ejb.EJB; +import jakarta.inject.Inject; import org.eclipse.microprofile.config.inject.ConfigProperty; -import javax.mail.internet.InternetAddress; -import javax.servlet.ServletConfig; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.mail.internet.InternetAddress; +import jakarta.servlet.ServletConfig; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServlet; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import javax.xml.stream.XMLStreamException; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java index 77ec6701bc6..f56fe608a52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java @@ -29,15 +29,15 @@ import java.time.Instant; import java.util.Iterator; import java.util.logging.Logger; -import javax.ejb.ActivationConfigProperty; -import javax.ejb.EJB; -import javax.ejb.MessageDriven; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.jms.JMSException; -import javax.jms.Message; -import javax.jms.MessageListener; -import javax.jms.ObjectMessage; +import jakarta.ejb.ActivationConfigProperty; +import jakarta.ejb.EJB; +import jakarta.ejb.MessageDriven; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.jms.JMSException; +import jakarta.jms.Message; +import jakarta.jms.MessageListener; +import jakarta.jms.ObjectMessage; /** * * This is an experimental, JMS-based implementation of asynchronous @@ -49,7 +49,7 @@ mappedName = "java:app/jms/queue/ingest", activationConfig = { @ActivationConfigProperty(propertyName = "acknowledgeMode", propertyValue = "Auto-acknowledge"), - @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue") + @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "jakarta.jms.Queue") } ) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java 
b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java index 1ba63207208..0fed25e5c88 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java @@ -1,19 +1,19 @@ package edu.harvard.iq.dataverse.ingest; -import javax.annotation.Resource; +import jakarta.annotation.Resource; // https://www.baeldung.com/jee-cdi-vs-ejb-singleton -import javax.inject.Singleton; -import javax.enterprise.inject.Produces; -import javax.jms.JMSConnectionFactoryDefinition; -import javax.jms.JMSDestinationDefinition; -import javax.jms.Queue; -import javax.jms.QueueConnectionFactory; +import jakarta.inject.Singleton; +import jakarta.enterprise.inject.Produces; +import jakarta.jms.JMSConnectionFactoryDefinition; +import jakarta.jms.JMSDestinationDefinition; +import jakarta.jms.Queue; +import jakarta.jms.QueueConnectionFactory; @JMSConnectionFactoryDefinition( description = "Dataverse Ingest Queue Factory", name = "java:app/jms/factory/ingest", resourceAdapter = "jmsra", - interfaceName = "javax.jms.QueueConnectionFactory", + interfaceName = "jakarta.jms.QueueConnectionFactory", maxPoolSize = 250, minPoolSize = 1, properties = { @@ -25,7 +25,7 @@ description = "Dataverse Ingest Queue", name = "java:app/jms/queue/ingest", resourceAdapter = "jmsra", - interfaceName="javax.jms.Queue", + interfaceName="jakarta.jms.Queue", destinationName = "DataverseIngest" ) @Singleton diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java index 31208abf839..a1a8bde77f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java @@ -9,17 +9,17 @@ import edu.harvard.iq.dataverse.DataFile; import java.io.Serializable; import java.util.Date; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.Lob; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.Lob; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java index 024e90325c3..a5d6a1af75c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java @@ -7,16 +7,16 @@ import edu.harvard.iq.dataverse.DataFile; import java.io.Serializable; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToOne; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; 
+import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index c1e82042898..40dc3d6fdd6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -107,20 +107,20 @@ import java.util.logging.Logger; import java.util.Hashtable; import java.util.Optional; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.jms.Queue; -import javax.jms.QueueConnectionFactory; -import javax.annotation.Resource; -import javax.ejb.Asynchronous; -import javax.jms.JMSException; -import javax.jms.QueueConnection; -import javax.jms.QueueSender; -import javax.jms.QueueSession; -import javax.jms.Message; -import javax.faces.application.FacesMessage; -import javax.ws.rs.core.MediaType; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.jms.Queue; +import jakarta.jms.QueueConnectionFactory; +import jakarta.annotation.Resource; +import jakarta.ejb.Asynchronous; +import jakarta.jms.JMSException; +import jakarta.jms.QueueConnection; +import jakarta.jms.QueueSender; +import jakarta.jms.QueueSession; +import jakarta.jms.Message; +import jakarta.faces.application.FacesMessage; +import jakarta.ws.rs.core.MediaType; import ucar.nc2.NetcdfFile; import ucar.nc2.NetcdfFiles; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java index 9ea2cd0343f..8c5dad237b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.util.List; import java.util.logging.Logger; -//import javax.ejb.EJB; +//import jakarta.ejb.EJB; /** * Used by the IngestServiceBean to redistribute a zipped Shapefile* diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java index 368824680c0..3d30f7e6ec3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java @@ -20,15 +20,12 @@ package edu.harvard.iq.dataverse.ingest; import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.util.FileUtil; import java.io.File; -import java.io.FileWriter; import java.io.IOException; -import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; @@ -36,9 +33,9 @@ import java.util.List; import java.util.Set; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.dataverse.unf.UNFUtil; import 
org.dataverse.unf.UnfException; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java index 6d17a5bd553..eb1353fd792 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java @@ -21,14 +21,12 @@ import java.io.*; -import java.io.FileReader; import java.io.InputStreamReader; import java.text.*; import java.util.logging.*; import java.util.*; -import java.security.NoSuchAlgorithmException; -import javax.inject.Inject; +import jakarta.inject.Inject; // Rosuda Wrappers and Methods for R-calls to Rserve import edu.harvard.iq.dataverse.settings.JvmSettings; @@ -43,18 +41,14 @@ import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.datavariable.VariableCategory; -import edu.harvard.iq.dataverse.ingest.plugin.spi.*; import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader; import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi; import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest; import edu.harvard.iq.dataverse.rserve.*; -import javax.naming.Context; -import javax.naming.InitialContext; -import javax.naming.NamingException; import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.lang3.ArrayUtils; + /** * Dataverse 4.0 implementation of TabularDataFileReader for the * RData Binary Format. diff --git a/src/main/java/edu/harvard/iq/dataverse/license/License.java b/src/main/java/edu/harvard/iq/dataverse/license/License.java index c6e2cdbc2e5..fe19073ab8d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/license/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/license/License.java @@ -1,15 +1,15 @@ package edu.harvard.iq.dataverse.license; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.Table; -import javax.persistence.UniqueConstraint; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.Table; +import jakarta.persistence.UniqueConstraint; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; diff --git a/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java index 4f874132128..26b7ca63a29 100644 --- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java @@ -6,11 +6,11 @@ package edu.harvard.iq.dataverse.license; -import javax.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.faces.component.UIComponent; +import 
jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; @FacesConverter("licenseConverter") public class LicenseConverter implements Converter { diff --git a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java index 9704e4b92dc..93f4958038c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java @@ -5,13 +5,13 @@ import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse; import static edu.harvard.iq.dataverse.dataset.DatasetUtil.getLocalizedLicenseName; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceException; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.PersistenceException; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java b/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java index 33486128e7b..5844c8b6ab9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java +++ b/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java @@ -2,11 +2,11 @@ import edu.harvard.iq.dataverse.DvObject; import java.io.Serializable; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.OneToOne; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; /** * Future use, maybe. 
Once we're happy with the design we'll enable it as an diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java index d873b9f8989..c074cb5918f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java +++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java @@ -2,13 +2,13 @@ import java.io.Serializable; import java.util.Objects; -import javax.json.Json; -import javax.json.JsonObjectBuilder; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; @Entity public class StorageSite implements Serializable { diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java index c7057ab9318..781e896e9a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java @@ -2,12 +2,12 @@ import java.util.List; import java.util.logging.Logger; -import javax.ejb.Stateless; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.Stateless; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; @Stateless public class StorageSiteServiceBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java index ebc2bb0f19f..6ff0f7ca379 100644 --- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java @@ -2,7 +2,7 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.List; -import javax.json.JsonObject; +import jakarta.json.JsonObject; public class StorageSiteUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java index 3c1c0bc0c68..469f3abe9da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java @@ -7,14 +7,14 @@ import edu.harvard.iq.dataverse.Dataset; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.validation.constraints.NotNull; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import 
jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.validation.constraints.NotNull; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java index c05fc9b1a4e..50c24274bb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java @@ -10,16 +10,16 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonValue; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonValue; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java index fe0565c3ff8..ac3dff356eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java @@ -3,15 +3,15 @@ import edu.harvard.iq.dataverse.Dataset; import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Transient; -import javax.validation.constraints.NotNull; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Transient; +import jakarta.validation.constraints.NotNull; /** * Cached versions of views, downloads, and citations to show in the UI and API. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 39afdf318ad..0925c164bf4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -5,26 +5,23 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import java.io.StringReader; import java.math.BigDecimal; -import java.sql.Timestamp; import java.util.ArrayList; -import java.util.Date; import java.util.List; import java.util.ListIterator; import java.util.Set; -import java.util.concurrent.Future; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.json.JsonValue; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.json.JsonValue; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java index e1a635a9d6c..5edf2fde0c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java @@ -16,14 +16,14 @@ import java.text.SimpleDateFormat; import java.util.Date; import java.util.TimeZone; -import javax.ejb.EJB; -import javax.enterprise.context.RequestScoped; -import javax.faces.context.FacesContext; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpSession; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.UriInfo; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.RequestScoped; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpSession; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.UriInfo; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java index f3d45642083..8f32750f090 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java @@ -1,17 +1,16 @@ package edu.harvard.iq.dataverse.makedatacount; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonValue; -import javax.persistence.Transient; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonValue; 
+import jakarta.persistence.Transient; /** * See doc/sphinx-guides/source/admin/make-data-count.rst for user facing docs diff --git a/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java index 88af8478a8f..33f8277919a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java @@ -2,7 +2,6 @@ package edu.harvard.iq.dataverse.metadataimport; -import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; @@ -17,13 +16,13 @@ import java.io.IOException; import java.util.logging.Logger; import java.io.StringReader; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.PersistenceContext; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.PersistenceContext; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index ff5ffee8f85..065b42e5afe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.Metric; -import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil.MetricType; import static edu.harvard.iq.dataverse.metrics.MetricsUtil.*; @@ -23,19 +22,19 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.ws.rs.core.UriInfo; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import jakarta.ws.rs.core.UriInfo; @Stateless public class MetricsServiceBean implements Serializable { @@ -383,7 +382,7 @@ public JsonArray filesByType(Dataverse d) { jab.add(stats); } - } catch (javax.persistence.NoResultException nr) { + } catch 
(NoResultException nr) { // do nothing } return jab.build(); @@ -519,7 +518,7 @@ public JsonArray fileDownloads(String yyyymm, Dataverse d, boolean uniqueCounts) job.add(MetricsUtil.COUNT, (long) result[2]); jab.add(job); } - } catch (javax.persistence.NoResultException nr) { + } catch (NoResultException nr) { // do nothing } return jab.build(); @@ -558,7 +557,7 @@ public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) { jab.add(job); } - } catch (javax.persistence.NoResultException nr) { + } catch (NoResultException nr) { // do nothing } return jab.build(); @@ -718,7 +717,7 @@ public Metric getMetric(String name, String dataLocation, String dayString, Data Metric metric = null; try { metric = (Metric) query.getSingleResult(); - } catch (javax.persistence.NoResultException nr) { + } catch (NoResultException nr) { // do nothing logger.fine("No result"); } catch (NonUniqueResultException nur) { diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java index 72d8f5402bb..74bb53e1191 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java @@ -6,21 +6,19 @@ import java.time.LocalDate; import java.time.YearMonth; import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; -import java.time.temporal.ChronoField; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonReader; -import javax.ws.rs.BadRequestException; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonReader; +import jakarta.ws.rs.BadRequestException; public class MetricsUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java index d244021d01a..e9898031343 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java @@ -25,25 +25,23 @@ import edu.harvard.iq.dataverse.search.SearchException; import edu.harvard.iq.dataverse.search.SearchFields; import edu.harvard.iq.dataverse.search.SortBy; -import java.math.BigDecimal; -import java.util.Arrays; + import java.util.List; import java.util.Map; -import java.util.Random; import java.util.logging.Logger; import java.util.Locale; -import javax.ejb.EJB; -import javax.inject.Inject; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.container.ContainerRequestContext; -import javax.ws.rs.core.Context; +import jakarta.ejb.EJB; +import jakarta.inject.Inject; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.DefaultValue; +import 
jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; import edu.harvard.iq.dataverse.util.BundleUtil; import org.apache.commons.lang3.StringUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java index c378034f951..2ab248fcc0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java @@ -17,9 +17,9 @@ import java.util.HashMap; import java.util.List; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java index 6acdfd9cdde..917884f3549 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java @@ -18,9 +18,9 @@ import java.util.Map; import java.util.Set; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.lang3.StringUtils; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java index 2567f8b2774..3ae64d9d760 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java @@ -18,12 +18,12 @@ import java.util.Arrays; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; /* * To change this license header, choose License Headers in Project Properties. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java b/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java index 4bf13e04284..096974b9d72 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java @@ -15,9 +15,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java index 4c596d1bb84..dcb76f42acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java @@ -3,16 +3,15 @@ import edu.harvard.iq.dataverse.DatasetPage; import edu.harvard.iq.dataverse.DataverseRoleServiceBean; import edu.harvard.iq.dataverse.DataverseSession; -import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /* * To change this license header, choose License Headers in Project Properties. diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java index cf7380a9a9b..e328a50e962 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java @@ -23,9 +23,8 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import org.apache.commons.lang3.StringUtils; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; /** * Input: dvObject id, parent Id, and dvObject type (from Solr) diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java index a3150161c52..c078860ad8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java @@ -6,19 +6,19 @@ import java.sql.Timestamp; import java.util.Date; import java.util.UUID; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; -import javax.persistence.Table; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import 
jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; @Table(indexes = {@Index(columnList="token") , @Index(columnList="builtinuser_id")}) diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java index b9eabf45159..b19721d56bb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java @@ -16,19 +16,19 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import java.util.Arrays; import java.util.Date; import java.util.List; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; import org.apache.commons.lang3.StringUtils; import org.hibernate.validator.constraints.NotBlank; diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java index c8db23985d8..5d1c167d2a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java @@ -10,18 +10,17 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.text.MessageFormat; -import java.util.Date; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @Stateless diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java index 54d64710511..3bd9d9dd022 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java @@ -3,13 +3,13 @@ import edu.harvard.iq.dataverse.DOIServiceBean; import 
edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.GlobalId; -import java.util.ArrayList; + import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.ejb.Stateless; +import jakarta.ejb.Stateless; @Stateless public class FakePidProviderServiceBean extends DOIServiceBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java index f387188b690..d145a7ec106 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java @@ -4,21 +4,17 @@ import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.GlobalIdServiceBean; -import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key; import edu.harvard.iq.dataverse.util.SystemConfig; -import java.lang.StackWalker.StackFrame; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; -import java.util.stream.Stream; -import javax.annotation.PostConstruct; -import javax.ejb.Stateless; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.Stateless; /** * PermaLink provider diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java index 478f5d6c2c4..5bc855a9593 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java @@ -1,10 +1,10 @@ package edu.harvard.iq.dataverse.pidproviders; import java.util.Arrays; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.ejb.Startup; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; import edu.harvard.iq.dataverse.DOIDataCiteServiceBean; import edu.harvard.iq.dataverse.DOIEZIdServiceBean; diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java index 4db7d099a47..78305648f67 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java @@ -16,13 +16,13 @@ import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.InternalServerErrorException; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.ServiceUnavailableException; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.ServiceUnavailableException; public class PidUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java index 20f1051763f..f7e9372cc9b 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java @@ -1,13 +1,12 @@ package edu.harvard.iq.dataverse.pidproviders; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.annotation.PostConstruct; -import javax.ejb.Stateless; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.Stateless; import org.apache.commons.httpclient.HttpException; import org.apache.commons.lang3.NotImplementedException; diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java index 8847a99bd20..c856c5363e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java @@ -7,7 +7,7 @@ import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Stateless; +import jakarta.ejb.Stateless; import org.apache.commons.lang3.NotImplementedException; /** This class is just used to parse Handles that are not managed by any account configured in Dataverse diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index b0658f10b34..9af4bb6af9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -4,10 +4,10 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import java.io.Serializable; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; /** * Backing bean for JSF page. Sets session to {@link PrivateUrlUser}. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java index 8eb0dfe4ebd..9e5879106e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java @@ -8,14 +8,14 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.Serializable; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java index 65b97b80eea..ba5aba1b69b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java @@ -1,11 +1,11 @@ package edu.harvard.iq.dataverse.provenance; -import javax.enterprise.inject.spi.CDI; -import javax.faces.component.UIComponent; -import javax.faces.context.FacesContext; -import javax.faces.convert.Converter; -import javax.faces.convert.FacesConverter; -import javax.inject.Inject; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.faces.component.UIComponent; +import jakarta.faces.context.FacesContext; +import jakarta.faces.convert.Converter; +import jakarta.faces.convert.FacesConverter; +import jakarta.inject.Inject; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java index a17e77f2a9e..c2eecb90d9e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java +++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java @@ -5,12 +5,12 @@ import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonParser; -import edu.harvard.iq.dataverse.api.AbstractApiBean; + import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.logging.Logger; -import javax.json.JsonObject; +import jakarta.json.JsonObject; import org.everit.json.schema.Schema; import org.everit.json.schema.ValidationException; import org.everit.json.schema.loader.SchemaLoader; diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java index a4b7cdf8d4e..6e8a512902a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java @@ -24,19 +24,19 @@ import java.util.HashMap; import java.util.Map; import java.util.logging.Level; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; +import jakarta.ejb.EJB; +import 
jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; import org.apache.commons.io.IOUtils; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; -import javax.faces.application.FacesMessage; -import javax.faces.context.ExternalContext; -import javax.faces.context.FacesContext; -import javax.json.JsonObject; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.ExternalContext; +import jakarta.faces.context.FacesContext; +import jakarta.json.JsonObject; import org.primefaces.model.file.UploadedFile; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java index 4d89a2842cd..c252d2e3330 100644 --- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java @@ -6,10 +6,10 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.JsonArray; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.JsonArray; @Stateless @Named diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java index ee52254d6f5..8501fba3ce0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java @@ -3,15 +3,15 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.locality.StorageSite; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.SystemConfig; + import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; public class RepositoryStorageAbstractionLayerUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java index ef37569ac54..bc92959a5ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java @@ -14,20 +14,16 @@ import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.StringTokenizer; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.faces.view.ViewScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.json.JsonObject; 
+import jakarta.ejb.EJB; +import jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.json.JsonObject; import org.apache.commons.lang3.StringUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java b/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java index f8bd0ea5a10..f97c1641479 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.search; -import javax.inject.Named; +import jakarta.inject.Named; @Named public class FacetLabel implements Comparable{ diff --git a/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java b/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java index 98a882c13ca..d40d8c362af 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse.search; import java.util.List; -import javax.inject.Named; +import jakarta.inject.Named; @Named public class Highlight { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java index a04ae934259..c9cf1cd9dc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java @@ -5,9 +5,9 @@ import java.util.Collection; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; @Stateless public class IndexAsync { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java index 932f58d875d..0eeb681514c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java @@ -1,30 +1,27 @@ package edu.harvard.iq.dataverse.search; -import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DvObjectServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; -import java.sql.Timestamp; import java.util.ArrayList; -import java.util.Date; import java.util.List; import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.AsyncResult; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.AsyncResult; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.solr.client.solrj.SolrServerException; @Named diff 
--git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 669125090b8..d6d0be7a17b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -41,7 +41,6 @@ import java.io.IOException; import java.io.InputStream; import java.sql.Timestamp; -import java.text.NumberFormat; import java.text.SimpleDateFormat; import java.time.LocalDate; import java.util.ArrayList; @@ -54,26 +53,25 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Future; import java.util.function.Function; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.ejb.AsyncResult; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; -import javax.inject.Named; -import javax.json.JsonObject; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; +import jakarta.ejb.AsyncResult; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBException; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW; +import jakarta.inject.Named; +import jakarta.json.JsonObject; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java index 2bf8807e301..8caee7d16b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java @@ -7,11 +7,11 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.faces.context.FacesContext; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; @Named @Stateless diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index bfe397cf48c..2ce06541afa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -8,8 +8,6 @@ import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersion.VersionState; import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import edu.harvard.iq.dataverse.Dataverse; import 
edu.harvard.iq.dataverse.DataverseFacet; @@ -37,12 +35,12 @@ import java.util.Optional; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.enterprise.context.RequestScoped; -import javax.faces.context.FacesContext; -import javax.inject.Inject; -import javax.inject.Named; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.RequestScoped; +import jakarta.faces.context.FacesContext; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; @@ -215,7 +213,7 @@ public String searchRedirect(String dataverseRedirectPage, Dataverse dataverseIn qParam = "&q=" + query; } - return widgetWrapper.wrapURL(dataverseRedirectPage + "?faces-redirect=true&q=" + qParam + optionalDataverseScope); + return widgetWrapper.wrapURL(dataverseRedirectPage + "?faces-redirect=true" + qParam + optionalDataverseScope); } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java index e96164d442d..0dd2153f75b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java @@ -22,9 +22,9 @@ import java.util.Map; import java.util.Set; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; /** * Determine whether items should be searchable. diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 9ac4b2406e8..44976d232c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -20,7 +20,6 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; import java.lang.reflect.Field; -import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; @@ -36,16 +35,16 @@ import java.util.MissingResourceException; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.EJBTransactionRolledbackException; -import javax.ejb.Stateless; -import javax.ejb.TransactionRolledbackLocalException; -import javax.inject.Named; -import javax.persistence.NoResultException; +import jakarta.ejb.EJB; +import jakarta.ejb.EJBTransactionRolledbackException; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionRolledbackLocalException; +import jakarta.inject.Named; +import jakarta.persistence.NoResultException; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.SortClause; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException; +import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.RangeFacet; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java index 0dc2fe08b54..b36130de7c8 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java @@ -10,11 +10,11 @@ import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.inject.Named; +import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.inject.Named; import java.io.IOException; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java index 5856004ce53..04021eb75b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java @@ -20,11 +20,11 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonObjectBuilder; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrInputDocument; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java index b499b80961e..893099ff08d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java @@ -5,8 +5,8 @@ import java.util.List; import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.FacetField; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index b58c22b64c9..6ad7f9dbbf6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -9,10 +9,10 @@ import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.collections4.CollectionUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java index 66bb63ed596..ff4a2e4aa96 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java @@ -5,16 +5,16 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.List; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import 
javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.OneToMany; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToMany; +import jakarta.persistence.Table; @Entity @Table(indexes = {@Index(columnList="definitionpoint_id") diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java index f884a9529c9..7f51f9c8728 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java @@ -1,16 +1,16 @@ package edu.harvard.iq.dataverse.search.savedsearch; import java.io.Serializable; -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; @Entity @Table(indexes = {@Index(columnList="savedsearch_id")}) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java index 587e054dc4a..7fc2bdf79a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java @@ -21,24 +21,23 @@ import edu.harvard.iq.dataverse.search.SortBy; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.ArrayList; -import java.util.Collections; import java.util.Date; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.EJB; -import javax.ejb.Schedule; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.persistence.EntityManager; -import javax.persistence.NoResultException; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; -import javax.servlet.http.HttpServletRequest; +import jakarta.ejb.EJB; +import jakarta.ejb.Schedule; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; +import 
jakarta.persistence.NonUniqueResultException; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; +import jakarta.servlet.http.HttpServletRequest; @Stateless @Named diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java index 160ed693eee..b1910a2fbb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java @@ -2,13 +2,13 @@ import java.io.Serializable; import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; /** * A single value in the config of dataverse. diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index ead0ac3bd39..2826df74ed1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -5,16 +5,16 @@ import edu.harvard.iq.dataverse.api.ApiBlockingFilter; import edu.harvard.iq.dataverse.util.StringUtil; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.json.JsonValue; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.json.JsonValue; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; import org.json.JSONArray; import org.json.JSONException; diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java index 14db98e540e..a51acd1f54f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java @@ -3,8 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; import java.util.List; -import javax.ejb.Asynchronous; -import javax.ejb.Stateless; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.Stateless; @Stateless public class SiteMapServiceBean { diff --git a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java index b132bff9429..6eb3a8df0bc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java @@ -25,17 +25,17 @@ import java.util.Iterator; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.PostConstruct; -import javax.annotation.Resource; 
-import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.ejb.Startup; -import javax.ejb.Timeout; -import javax.ejb.Timer; -import javax.ejb.TimerConfig; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.servlet.http.HttpServletRequest; +import jakarta.annotation.PostConstruct; +import jakarta.annotation.Resource; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; +import jakarta.ejb.Timeout; +import jakarta.ejb.Timer; +import jakarta.ejb.TimerConfig; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.servlet.http.HttpServletRequest; /** @@ -55,7 +55,7 @@ public class DataverseTimerServiceBean implements Serializable { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean"); @Resource - javax.ejb.TimerService timerService; + jakarta.ejb.TimerService timerService; @EJB HarvesterServiceBean harvesterService; @EJB @@ -109,7 +109,7 @@ public void createTimer(Date initialExpiration, long intervalDuration, Serializa */ @Timeout @TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED) - public void handleTimeout(javax.ejb.Timer timer) { + public void handleTimeout(jakarta.ejb.Timer timer) { // We have to put all the code in a try/catch block because // if an exception is thrown from this method, Glassfish will automatically // call the method a second time. (The minimum number of re-tries for a Timer method is 1) diff --git a/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java b/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java index 07937638607..5e7f532d2ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java @@ -7,14 +7,13 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.mydata.Pager; -import edu.harvard.iq.dataverse.util.BundleUtil; + import java.util.ArrayList; import java.util.List; -import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 26694c0ac6c..922e6ff5d28 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -12,7 +12,7 @@ import java.util.logging.Logger; import java.util.Map; import java.util.HashMap; -import javax.faces.context.FacesContext; +import jakarta.faces.context.FacesContext; public class BundleUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java index 9c1c89430d5..d51f70229a9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java @@ -1,8 +1,8 @@ package edu.harvard.iq.dataverse.util; -import javax.enterprise.inject.Produces; -import javax.inject.Qualifier; -import javax.inject.Singleton; +import jakarta.enterprise.inject.Produces; +import jakarta.inject.Qualifier; +import jakarta.inject.Singleton; import java.lang.annotation.ElementType; import 
java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java index d2e59fac9f5..1910fde6489 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java @@ -5,8 +5,8 @@ */ package edu.harvard.iq.dataverse.util; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java index 800c05ae6dc..62cd318706f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java @@ -1,12 +1,12 @@ package edu.harvard.iq.dataverse.util; -import javax.annotation.Resource; -import javax.annotation.sql.DataSourceDefinition; -import javax.enterprise.inject.Produces; -import javax.inject.Singleton; +import jakarta.annotation.Resource; +import jakarta.annotation.sql.DataSourceDefinition; +import jakarta.enterprise.inject.Produces; +import jakarta.inject.Singleton; import javax.sql.DataSource; -// Find docs here: https://javaee.github.io/javaee-spec/javadocs/javax/annotation/sql/DataSourceDefinition.html +// Find docs here: https://jakarta.ee/specifications/annotations/2.1/apidocs/jakarta.annotation/jakarta/annotation/sql/datasourcedefinition @Singleton @DataSourceDefinition( name = "java:app/jdbc/dataverse", diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java b/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java index df97998d9e8..25032860d11 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -12,12 +11,10 @@ import java.util.Comparator; import java.util.List; import java.util.Map; -import java.util.SortedSet; -import java.util.TreeSet; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonObject; +import jakarta.json.Json; +import jakarta.json.JsonObject; /** * A means of iterating over {@link DatasetField}s, or a collection of them. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java index cf337b0a020..fd8c8fa4a9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.util; -import javax.ejb.EJBException; +import jakarta.ejb.EJBException; public class EjbUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 7137db9ca78..5f7643b3115 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -64,8 +64,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.channels.FileChannel; -import java.nio.channels.WritableByteChannel; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; @@ -87,11 +85,11 @@ import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; -import javax.activation.MimetypesFileTypeMap; -import javax.ejb.EJBException; -import javax.enterprise.inject.spi.CDI; -import javax.json.JsonArray; -import javax.json.JsonObject; +import jakarta.activation.MimetypesFileTypeMap; +import jakarta.ejb.EJBException; +import jakarta.enterprise.inject.spi.CDI; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java index 5b87b18573b..b02ac63cacd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java @@ -1,11 +1,9 @@ package edu.harvard.iq.dataverse.util; -import java.util.Locale; -import java.util.ResourceBundle; import java.util.logging.Level; import java.util.logging.Logger; -import javax.faces.application.FacesMessage; -import javax.faces.context.FacesContext; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.context.FacesContext; /** * Utility class for common JSF tasks. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index 72980c3451a..dcb6e078df6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -8,8 +8,8 @@ import java.util.Arrays; import java.util.List; import java.util.logging.Logger; -import javax.mail.internet.AddressException; -import javax.mail.internet.InternetAddress; +import jakarta.mail.internet.AddressException; +import jakarta.mail.internet.InternetAddress; public class MailUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java index 431013771c5..f68957ad060 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java @@ -4,10 +4,10 @@ import java.util.List; import java.util.logging.Logger; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonString; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonString; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java b/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java index 0221d45e4b0..fdac50ee58a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java @@ -1,12 +1,12 @@ package edu.harvard.iq.dataverse.util; import java.text.MessageFormat; -import javax.faces.application.FacesMessage; -import javax.faces.component.UIComponent; -import javax.faces.component.UIInput; -import javax.faces.context.FacesContext; -import javax.faces.validator.Validator; -import javax.faces.validator.ValidatorException; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIComponent; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import jakarta.faces.validator.Validator; +import jakarta.faces.validator.ValidatorException; /** * from http://balusc.blogspot.com/2008/09/validate-required-checkbox.html via diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java index 0539ea40cb8..effa1980d70 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java @@ -4,8 +4,8 @@ import java.util.HashMap; import java.util.Map.Entry; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpSession; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpSession; public class SessionUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 54be3a8765f..2c9b7167059 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -16,9 +16,9 @@ Two configurable options allow changing the limit for the number of authors or d import edu.harvard.iq.dataverse.*; import 
edu.harvard.iq.dataverse.dataset.DatasetUtil; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b9459e748bf..4fed3a05976 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -13,15 +13,15 @@ import edu.harvard.iq.dataverse.validation.PasswordValidatorUtil; import org.passay.CharacterRule; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.json.JsonString; -import javax.json.JsonValue; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; import java.io.StringReader; import java.net.InetAddress; import java.net.UnknownHostException; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index 4acf2d544e8..4ae76a7b8db 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -5,8 +5,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import javax.json.Json; -import javax.json.JsonValue; +import jakarta.json.Json; +import jakarta.json.JsonValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java index c2d9bf67236..acbdc6aa3c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java @@ -1,22 +1,12 @@ package edu.harvard.iq.dataverse.util; -import java.util.Date; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map.Entry; import java.util.logging.Logger; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpSession; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpSession; import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetPage; -import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; public class WebloaderUtil { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index 8061b6e339e..baba1a0cb43 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -110,7 +110,7 @@ public class BagGenerator { private String 
apiKey = null; - private javax.json.JsonObject oremapObject; + private jakarta.json.JsonObject oremapObject; private JsonObject aggregation; private String dataciteXml; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index c9e71296d0f..b3995b5957e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -30,12 +30,12 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; import org.apache.commons.lang3.exception.ExceptionUtils; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java index 6cd7f0928dc..4d63edac268 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java @@ -3,10 +3,10 @@ import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.ejb.Startup; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; /** * This is a small helper bean diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java b/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java index 53c80037223..4b0263030dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java @@ -7,10 +7,10 @@ import edu.harvard.iq.dataverse.util.bagit.data.FileDataProviderFactory; import edu.harvard.iq.dataverse.util.bagit.data.FileUtilWrapper; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.enterprise.context.SessionScoped; -import javax.inject.Named; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.enterprise.context.SessionScoped; +import jakarta.inject.Named; import java.io.Serializable; import java.util.Optional; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java index ee0a882a10d..3fcaf6b11ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java @@ -1,11 +1,10 @@ package edu.harvard.iq.dataverse.util.json; import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; import edu.harvard.iq.dataverse.MetadataBlock; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.workflow.Workflow; -import javax.json.JsonObjectBuilder; +import jakarta.json.JsonObjectBuilder; /** * A Json printer that prints minimal data on objects. 
Useful when embedding diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 0fd9705031d..113a6128364 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -18,17 +18,17 @@ import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonString; -import javax.json.JsonValue; -import javax.json.JsonWriter; -import javax.json.JsonWriterFactory; -import javax.json.JsonValue.ValueType; -import javax.json.stream.JsonGenerator; -import javax.ws.rs.BadRequestException; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.JsonWriter; +import jakarta.json.JsonWriterFactory; +import jakarta.json.JsonValue.ValueType; +import jakarta.json.stream.JsonGenerator; +import jakarta.ws.rs.BadRequestException; import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.Dataset; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 59290449988..febb785cd95 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -50,13 +50,13 @@ import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonReader; -import javax.json.JsonString; -import javax.json.JsonValue; -import javax.json.JsonValue.ValueType; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.JsonValue.ValueType; /** * Parses JSON objects into domain objects. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 5080ae524c7..b6026998bb7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -17,7 +17,6 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseTheme; -import edu.harvard.iq.dataverse.api.Datasets; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; @@ -55,12 +54,10 @@ import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; import java.util.*; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; - -import org.apache.commons.collections4.CollectionUtils; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; import java.util.function.BiConsumer; import java.util.function.BinaryOperator; @@ -71,10 +68,10 @@ import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.json.JsonArray; -import javax.json.JsonObject; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; /** * Convert objects to Json. diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java index 1c7dce24680..55f9ecb5ce8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java @@ -3,10 +3,10 @@ import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import javax.annotation.PostConstruct; -import javax.ejb.EJB; -import javax.ejb.Singleton; -import javax.ejb.Startup; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.EJB; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; /** * This is a small helper bean diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index d02099eddb5..09d02854bab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -9,11 +9,11 @@ import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonWriter; -import javax.json.JsonWriterFactory; -import javax.json.stream.JsonGenerator; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonWriter; +import jakarta.json.JsonWriterFactory; +import jakarta.json.stream.JsonGenerator; public class JsonUtil { @@ -47,7 +47,7 @@ public static String prettyPrint(JsonArray jsonArray) { return stringWriter.toString(); } - public static String prettyPrint(javax.json.JsonObject jsonObject) { + public static String prettyPrint(jakarta.json.JsonObject jsonObject) { Map config = new HashMap<>(); config.put(JsonGenerator.PRETTY_PRINTING, true); JsonWriterFactory 
jsonWriterFactory = Json.createWriterFactory(config); @@ -58,13 +58,13 @@ public static String prettyPrint(javax.json.JsonObject jsonObject) { return stringWriter.toString(); } - public static javax.json.JsonObject getJsonObject(String serializedJson) { + public static jakarta.json.JsonObject getJsonObject(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { return Json.createReader(rdr).readObject(); } } - public static javax.json.JsonArray getJsonArray(String serializedJson) { + public static jakarta.json.JsonArray getJsonArray(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { return Json.createReader(rdr).readArray(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java index 59a23a43452..ef8ab39122f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java @@ -5,11 +5,11 @@ import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Timestamp; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; /** * A JSON builder that drops any null values. If we didn't drop'em, diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java index 5050aad5bf7..624e49623f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse.validation; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; import org.apache.commons.validator.routines.EmailValidator; diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java index c32e6728358..41e7f1b8b22 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java @@ -18,9 +18,9 @@ import java.util.logging.Logger; import java.util.regex.Pattern; import java.util.stream.Collectors; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.inject.Named; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; import org.passay.CharacterCharacteristicsRule; import org.passay.CharacterRule; import org.passay.DictionaryRule; diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java index 846ae48783a..285f34d3f8c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.validation; -import edu.harvard.iq.dataverse.util.BundleUtil; -import javax.validation.ConstraintValidator; -import 
javax.validation.ConstraintValidatorContext; +import jakarta.validation.ConstraintValidator; +import jakarta.validation.ConstraintValidatorContext; import org.apache.commons.validator.routines.UrlValidator; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java index 310dc950858..6ec677bd7a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java @@ -11,8 +11,8 @@ import java.lang.annotation.Retention; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; /** * * @author skraffmi diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java index 5aaab0c2e8e..3834b119598 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java @@ -6,8 +6,8 @@ import java.lang.annotation.Retention; import static java.lang.annotation.RetentionPolicy.RUNTIME; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; @Target({FIELD}) @Retention(RUNTIME) diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java index 0583b70df49..6307edd073a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java @@ -10,11 +10,11 @@ import java.lang.annotation.Retention; import static java.lang.annotation.RetentionPolicy.RUNTIME; import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.Payload; -import javax.validation.constraints.NotBlank; -import javax.validation.constraints.Size; -import javax.validation.constraints.Pattern; +import jakarta.validation.Constraint; +import jakarta.validation.Payload; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.Size; +import jakarta.validation.constraints.Pattern; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java index 577e0f756de..94fefa9bc13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java @@ -9,14 +9,14 @@ import java.io.Serializable; import java.util.HashMap; import java.util.Map; -import javax.persistence.ElementCollection; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.Id; -import javax.persistence.ManyToOne; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToOne; +import jakarta.persistence.ElementCollection; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToOne; /** * A workflow whose current step waits for 
an external system to complete a diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java index 6c73ed0e64b..bd32c517bc6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java @@ -6,16 +6,16 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.NamedQueries; -import javax.persistence.NamedQuery; -import javax.persistence.OneToMany; -import javax.persistence.OrderColumn; +import jakarta.persistence.CascadeType; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OrderColumn; /** * A list of steps that can be executed with a given context. diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java index cf78c4f8cdf..47f24c9b8bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.UserNotification; -import edu.harvard.iq.dataverse.UserNotification.Type; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -33,15 +32,15 @@ import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; -import javax.ejb.EJB; -import javax.ejb.Stateless; -import javax.ejb.TransactionAttribute; -import javax.ejb.TransactionAttributeType; -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; +import jakarta.ejb.Asynchronous; +import jakarta.ejb.EJB; +import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; /** * Service bean for managing and executing {@link Workflow}s diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java index 105af6a00d8..b0567bff107 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.workflow.internalspi; -import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import 
edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -10,13 +9,12 @@ import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; -import java.lang.reflect.Constructor; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import javax.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletRequest; /** * A step that submits a BagIT bag of the newly published dataset version via a diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java index bbe200aaeb3..ee770d4057e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java @@ -6,23 +6,18 @@ import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Pending; -import edu.harvard.iq.dataverse.workflow.step.Success; import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; import edu.harvard.iq.dataverse.workflows.WorkflowUtil; -import static edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult.OK; - -import java.io.StringReader; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.regex.Pattern; -import javax.json.Json; -import javax.json.JsonObject; +import jakarta.json.Json; +import jakarta.json.JsonObject; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpMethodBase; diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java index 13024f9f68f..124eea801d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java @@ -27,12 +27,12 @@ import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonValue; +import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonValue; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java index a06531a2666..07bcf247533 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java @@ -3,14 +3,14 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import java.io.Serializable; import java.util.Map; -import 
javax.persistence.Column; -import javax.persistence.ElementCollection; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.ManyToOne; +import jakarta.persistence.Column; +import jakarta.persistence.ElementCollection; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToOne; /** * A database row describing a step in a workflow. Actual steps can be instantiated diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java index d03afcaa91a..7cfa226d7ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java @@ -5,14 +5,14 @@ import java.io.Serializable; import java.sql.Timestamp; import java.util.Date; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; @Entity public class WorkflowComment implements Serializable { diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java index e6e6bfd23c8..456b829ba61 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java @@ -8,12 +8,11 @@ import java.util.logging.Level; import java.util.logging.Logger; -import javax.json.Json; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; -import edu.harvard.iq.dataverse.workflow.internalspi.PauseWithMessageStep; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Success; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 64a1c626f13..997f0470cc3 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -46,6 +46,7 @@ description=Description subject=Subject close=Close preview=Preview +query=Query continue=Continue name=Name institution=Institution @@ -1954,6 +1955,7 @@ file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about th file.dataFilesTab.versions.preloadmessage=(Loading versions...) file.previewTab.externalTools.header=Available Previews file.previewTab.button.label=Preview +file.toolsTab.button.label=File Tools file.previewTab.previews.not.available=Public previews are not available for this file. 
file.deleteDialog.tip=Are you sure you want to delete this dataset and all of its files? You cannot undelete this dataset. file.deleteDialog.header=Delete Dataset @@ -2109,12 +2111,23 @@ file.description.label=Description file.tags.label=Tags file.lastupdated.label=Last Updated file.DatasetVersion=Version +file.accessBtn.header.query=Query Options file.previewTab.tool.open=Open file.previewTab.header=Preview file.previewTab.presentation=File Preview Tool file.previewTab.openBtn=Open in New Window file.previewTab.exploreBtn={0} on {1} + +file.queryTab.tool.open=Open +file.queryTab.header=Query +file.queryTab.presentation=File Query Tool +file.queryTab.openBtn=Open in New Window +file.queryTab.queryBtn={0} on {1} + +#if you don't have a tool selected +file.toolTab.header=File Tools + file.metadataTab.fileMetadata.header=File Metadata file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID file.metadataTab.fileMetadata.downloadUrl.label=Download URL diff --git a/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml b/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml index 167fbdbec5d..0294f15e967 100644 --- a/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml +++ b/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml @@ -34,14 +34,14 @@ - + - + diff --git a/src/main/resources/META-INF/persistence.xml b/src/main/resources/META-INF/persistence.xml index 45552f36939..e6224dcdf01 100644 --- a/src/main/resources/META-INF/persistence.xml +++ b/src/main/resources/META-INF/persistence.xml @@ -1,11 +1,11 @@ - + - org.eclipse.persistence.jpa.PersistenceProvider java:app/jdbc/dataverse - diff --git a/src/main/webapp/WEB-INF/faces-config.xml b/src/main/webapp/WEB-INF/faces-config.xml index 2015ca55f5f..6eeb5a65baf 100644 --- a/src/main/webapp/WEB-INF/faces-config.xml +++ b/src/main/webapp/WEB-INF/faces-config.xml @@ -1,8 +1,7 @@ - + xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/web-facesconfig_4_0.xsd" + version="4.0"> edu.harvard.iq.dataverse.util.LocalBundle diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index 8179ca970d5..f16da2c8e5b 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -1,5 +1,8 @@ - + Dataverse @@ -27,7 +30,7 @@ See also dev guide: https://guides.dataverse.org/en/latest/developers/debugging.html --> - javax.faces.PROJECT_STAGE + jakarta.faces.PROJECT_STAGE ${MPCONFIG=dataverse.jsf.project-stage:Production} @@ -43,23 +46,23 @@ - javax.faces.INTERPRET_EMPTY_STRING_SUBMITTED_VALUES_AS_NULL + jakarta.faces.INTERPRET_EMPTY_STRING_SUBMITTED_VALUES_AS_NULL ${MPCONFIG=dataverse.jsf.empty-string-null:true} - javax.faces.FACELETS_SKIP_COMMENTS + jakarta.faces.FACELETS_SKIP_COMMENTS ${MPCONFIG=dataverse.jsf.skip-comments:true} - javax.faces.FACELETS_BUFFER_SIZE + jakarta.faces.FACELETS_BUFFER_SIZE ${MPCONFIG=dataverse.jsf.buffer-size:102400} - javax.faces.FACELETS_REFRESH_PERIOD + jakarta.faces.FACELETS_REFRESH_PERIOD ${MPCONFIG=dataverse.jsf.refresh-period:-1} @@ -86,14 +89,9 @@ Faces Servlet - javax.faces.webapp.FacesServlet + jakarta.faces.webapp.FacesServlet 1 - - Push Servlet - org.primefaces.push.PushServlet - true - OAIServlet @@ -128,10 +126,6 @@ Faces Servlet *.xhtml - - Push Servlet - /primepush/* - OAIServlet /oai @@ -274,5 +268,13 @@ edu.harvard.iq.dataverse.api.datadeposit.SWORDv2ContainerServlet /dvn/api/data-deposit/v1.1/swordv2/edit/* + + edu.harvard.iq.dataverse.api.datadeposit.SwordFilter + 
edu.harvard.iq.dataverse.api.datadeposit.SwordFilter + + + edu.harvard.iq.dataverse.api.datadeposit.SwordFilter + /dvn/api/data-deposit/v1.1/swordv2/edit-media/* + diff --git a/src/main/webapp/dataset-citation.xhtml b/src/main/webapp/dataset-citation.xhtml index 9baced25be0..b42dd5e563f 100644 --- a/src/main/webapp/dataset-citation.xhtml +++ b/src/main/webapp/dataset-citation.xhtml @@ -33,19 +33,13 @@ diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 1a049331ae4..5fac8241f13 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -903,7 +903,7 @@ filter="false"> - + diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 0d99f627dbe..4021ad7bc65 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -323,8 +323,35 @@ - - + + + + + + + + +
  • + + #{tool.getDisplayNameLang()} + + + + #{tool.getDisplayNameLang()} + +
  • +
    + +
    diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index 8531f413b38..a31438f8e06 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -25,7 +25,6 @@ configureTools - for single file, list of configureTools for the file bean - the named value of the backing bean for the below method(s), also used by isFilePg param unrestrictFileAction - name of the method on the above bean to call for unrestrict (method must take a boolean) - editFileAction - for selected files, name of method on the above bean to send for edit metadata refreshTagsPopoupAction - for selected files, name of method on the above bean to refresh tags popup --> @@ -38,7 +37,7 @@ + action="#{bean.editFileMetadata()}"> diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index df9e87c1719..5a60afef60c 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -29,6 +29,7 @@ + @@ -109,19 +110,22 @@ @@ -155,19 +159,22 @@ @@ -205,6 +212,7 @@ + @@ -342,11 +350,11 @@ - + - - + + @@ -356,17 +364,19 @@ - +