diff --git a/.changelog.yml b/.changelog.yml index bfdee0c0ca..dffef25453 100644 --- a/.changelog.yml +++ b/.changelog.yml @@ -1,5 +1,5 @@ # The full repository name -repo: go-gitea/gitea +repo: unfolding/dcs # Service type (gitea or github) service: github diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000000..9bcc93eaef --- /dev/null +++ b/.eslintignore @@ -0,0 +1,2 @@ +templates/custom/footer.tmpl + diff --git a/.github/workflows/cron-licenses.yml b/.github/workflows/cron-licenses.yml index 0fbcbf603d..3454972716 100644 --- a/.github/workflows/cron-licenses.yml +++ b/.github/workflows/cron-licenses.yml @@ -8,7 +8,7 @@ on: jobs: cron-licenses: runs-on: ubuntu-latest - if: github.repository == 'go-gitea/gitea' + if: github.repository == 'unfoldingWord/dcs' steps: - uses: actions/checkout@v4 - uses: actions/setup-go@v4 @@ -25,5 +25,5 @@ jobs: branch: main commit: true commit_message: "[skip ci] Updated licenses and gitignores" - remote: "git@github.com:go-gitea/gitea.git" + remote: "git@github.com:unfoldingWord/dcs.git" ssh_key: ${{ secrets.DEPLOY_KEY }} diff --git a/.github/workflows/cron-lock.yml b/.github/workflows/cron-lock.yml index 935f926cce..70cef74dbb 100644 --- a/.github/workflows/cron-lock.yml +++ b/.github/workflows/cron-lock.yml @@ -15,7 +15,7 @@ concurrency: jobs: action: runs-on: ubuntu-latest - if: github.repository == 'go-gitea/gitea' + if: github.repository == 'unfoldingWord/dcs' steps: - uses: dessant/lock-threads@v4 with: diff --git a/.github/workflows/cron-translations.yml b/.github/workflows/cron-translations.yml index b0effdee9d..bcde1ef3a3 100644 --- a/.github/workflows/cron-translations.yml +++ b/.github/workflows/cron-translations.yml @@ -8,7 +8,7 @@ on: jobs: crowdin-pull: runs-on: ubuntu-latest - if: github.repository == 'go-gitea/gitea' + if: github.repository == 'unfoldingWord/dcs' steps: - uses: actions/checkout@v4 - name: download from crowdin @@ -29,11 +29,11 @@ jobs: branch: main commit: true commit_message: "[skip ci] Updated translations via Crowdin" - remote: "git@github.com:go-gitea/gitea.git" + remote: "git@github.com:unfoldingWord/dcs.git" ssh_key: ${{ secrets.DEPLOY_KEY }} crowdin-push: runs-on: ubuntu-latest - if: github.repository == 'go-gitea/gitea' + if: github.repository == 'unfoldingWord/dcs' steps: - uses: actions/checkout@v4 - name: push translations to crowdin diff --git a/.github/workflows/deploy-release.yml b/.github/workflows/deploy-release.yml new file mode 100644 index 0000000000..277d5dd17c --- /dev/null +++ b/.github/workflows/deploy-release.yml @@ -0,0 +1,48 @@ +name: deploy-official-release-rootless + +on: + push: + tags: [ v**-dcs ] + +jobs: + release-docker: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + # fetch all commits instead of only the last as some branches are long lived and could have many between versions + # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. 
v1.21.0-dev-1-g1234567 + - run: git fetch --unshallow --quiet --tags --force + - uses: docker/setup-qemu-action@v2 + - uses: docker/setup-buildx-action@v2 + - name: Docker meta - create docker tags + id: meta + uses: docker/metadata-action@v5 + with: + images: | + unfoldingword/dcs + tags: | + type=match,pattern=v(\d+),group=1 + type=match,pattern=v(\d+.\d+),group=1 + type=match,pattern=v(\d+.\d+.\d+),group=1 + type=match,pattern=v(.*)-dcs,group=1 + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: build rootful docker image + uses: docker/build-push-action@v4 + with: + context: . + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + # - name: build rootless docker image + # uses: docker/build-push-action@v4 + # with: + # context: . + # platforms: linux/amd64,linux/arm64 + # push: true + # file: Dockerfile.rootless + # tags: unfoldingword/dcs:${{ steps.clean_name.outputs.branch }}-rootless diff --git a/.github/workflows/pull-docker-dryrun.yml b/.github/workflows/pull-docker-dryrun.yml index 61f1fd5632..1bb8111e95 100644 --- a/.github/workflows/pull-docker-dryrun.yml +++ b/.github/workflows/pull-docker-dryrun.yml @@ -20,7 +20,7 @@ jobs: - uses: docker/build-push-action@v4 with: push: false - tags: gitea/gitea:linux-amd64 + tags: unfoldingword/dcs:linux-amd64 rootless: if: needs.files-changed.outputs.docker == 'true' || needs.files-changed.outputs.actions == 'true' @@ -32,4 +32,4 @@ jobs: with: push: false file: Dockerfile.rootless - tags: gitea/gitea:linux-amd64 + tags: unfoldingword/dcs:linux-amd64 diff --git a/.github/workflows/release-nightly.yml b/.github/workflows/release-nightly.yml index b70a65c070..3687927a66 100644 --- a/.github/workflows/release-nightly.yml +++ b/.github/workflows/release-nightly.yml @@ -2,7 +2,7 @@ name: release-nightly on: push: - branches: [ main, release/v* ] + branches: [ main, release/dcs/v* ] concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -11,52 +11,52 @@ concurrency: jobs: disk-clean: uses: ./.github/workflows/disk-clean.yml - nightly-binary: - runs-on: nscloud - steps: - - uses: actions/checkout@v4 - # fetch all commits instead of only the last as some branches are long lived and could have many between versions - # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. 
v1.21.0-dev-1-g1234567 - - run: git fetch --unshallow --quiet --tags --force - - uses: actions/setup-go@v4 - with: - go-version-file: go.mod - check-latest: true - - uses: actions/setup-node@v3 - with: - node-version: 20 - - run: make deps-frontend deps-backend - # xgo build - - run: make release - env: - TAGS: bindata sqlite sqlite_unlock_notify - - name: import gpg key - id: import_gpg - uses: crazy-max/ghaction-import-gpg@v5 - with: - gpg_private_key: ${{ secrets.GPGSIGN_KEY }} - passphrase: ${{ secrets.GPGSIGN_PASSPHRASE }} - - name: sign binaries - run: | - for f in dist/release/*; do - echo '${{ secrets.GPGSIGN_PASSPHRASE }}' | gpg --pinentry-mode loopback --passphrase-fd 0 --batch --yes --detach-sign -u ${{ steps.import_gpg.outputs.fingerprint }} --output "$f.asc" "$f" - done - # clean branch name to get the folder name in S3 - - name: Get cleaned branch name - id: clean_name - run: | - REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/v//') - echo "Cleaned name is ${REF_NAME}" - echo "branch=${REF_NAME}" >> "$GITHUB_OUTPUT" - - name: configure aws - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - - name: upload binaries to s3 - run: | - aws s3 sync dist/release s3://${{ secrets.AWS_S3_BUCKET }}/gitea/${{ steps.clean_name.outputs.branch }} --no-progress + # nightly-binary: + # runs-on: nscloud + # steps: + # - uses: actions/checkout@v4 + # # fetch all commits instead of only the last as some branches are long lived and could have many between versions + # # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. 
v1.21.0-dev-1-g1234567 + # - run: git fetch --unshallow --quiet --tags --force + # - uses: actions/setup-go@v4 + # with: + # go-version-file: go.mod + # check-latest: true + # - uses: actions/setup-node@v3 + # with: + # node-version: 20 + # - run: make deps-frontend deps-backend + # # xgo build + # - run: make release + # env: + # TAGS: bindata sqlite sqlite_unlock_notify + # - name: import gpg key + # id: import_gpg + # uses: crazy-max/ghaction-import-gpg@v5 + # with: + # gpg_private_key: ${{ secrets.GPGSIGN_KEY }} + # passphrase: ${{ secrets.GPGSIGN_PASSPHRASE }} + # - name: sign binaries + # run: | + # for f in dist/release/*; do + # echo '${{ secrets.GPGSIGN_PASSPHRASE }}' | gpg --pinentry-mode loopback --passphrase-fd 0 --batch --yes --detach-sign -u ${{ steps.import_gpg.outputs.fingerprint }} --output "$f.asc" "$f" + # done + # # clean branch name to get the folder name in S3 + # - name: Get cleaned branch name + # id: clean_name + # run: | + # REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/v//') + # echo "Cleaned name is ${REF_NAME}" + # echo "branch=${REF_NAME}" >> "$GITHUB_OUTPUT" + # - name: configure aws + # uses: aws-actions/configure-aws-credentials@v4 + # with: + # aws-region: ${{ secrets.AWS_REGION }} + # aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + # aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + # - name: upload binaries to s3 + # run: | + # aws s3 sync dist/release s3://${{ secrets.AWS_S3_BUCKET }}/gitea/${{ steps.clean_name.outputs.branch }} --no-progress nightly-docker-rootful: runs-on: ubuntu-latest steps: @@ -78,7 +78,7 @@ jobs: echo "branch=nightly" >> "$GITHUB_OUTPUT" exit 0 fi - REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/v//') + REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/dcs\/v//' -e 's/-dcs$//') echo "branch=${REF_NAME}-nightly" >> "$GITHUB_OUTPUT" - name: Login to Docker Hub uses: docker/login-action@v2 @@ -93,42 +93,82 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: gitea/gitea:${{ steps.clean_name.outputs.branch }} - nightly-docker-rootless: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - # fetch all commits instead of only the last as some branches are long lived and could have many between versions - # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567 - - run: git fetch --unshallow --quiet --tags --force - - uses: actions/setup-go@v4 - with: - go-version-file: go.mod - check-latest: true - - uses: docker/setup-qemu-action@v2 - - uses: docker/setup-buildx-action@v2 - - name: Get cleaned branch name - id: clean_name - run: | - # if main then say nightly otherwise cleanup name - if [ "${{ github.ref }}" = "refs/heads/main" ]; then - echo "branch=nightly" >> "$GITHUB_OUTPUT" - exit 0 - fi - REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/v//') - echo "branch=${REF_NAME}-nightly" >> "$GITHUB_OUTPUT" - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: fetch go modules - run: make vendor - - name: build rootless docker image - uses: docker/build-push-action@v4 - with: - context: . 
- platforms: linux/amd64,linux/arm64 - push: true - file: Dockerfile.rootless - tags: gitea/gitea:${{ steps.clean_name.outputs.branch }}-rootless + tags: unfoldingword/dcs:${{ steps.clean_name.outputs.branch }} +# nightly-docker-rootless: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v4 +# # fetch all commits instead of only the last as some branches are long lived and could have many between versions +# # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567 +# - run: git fetch --unshallow --quiet --tags --force +# - uses: actions/setup-go@v4 +# with: +# go-version: "~1.21" +# check-latest: true +# - uses: docker/setup-qemu-action@v2 +# - uses: docker/setup-buildx-action@v2 +# - name: Get cleaned branch name +# id: clean_name +# run: | +# # if main then say nightly otherwise cleanup name +# if [ "${{ github.ref }}" = "refs/heads/main" ]; then +# echo "branch=nightly" >> "$GITHUB_OUTPUT" +# exit 0 +# fi +# REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/v//') +# echo "branch=${REF_NAME}-nightly" >> "$GITHUB_OUTPUT" +# - name: Login to Docker Hub +# uses: docker/login-action@v2 +# with: +# username: ${{ secrets.DOCKERHUB_USERNAME }} +# password: ${{ secrets.DOCKERHUB_TOKEN }} +# - name: fetch go modules +# run: make vendor +# - name: build rootless docker image +# uses: docker/build-push-action@v4 +# with: +# context: . +# platforms: linux/amd64,linux/arm64 +# push: true +# file: Dockerfile.rootless +# tags: gitea/gitea:${{ steps.clean_name.outputs.branch }}-rootless +# ======= +# tags: gitea/gitea:${{ steps.clean_name.outputs.branch }} +# nightly-docker-rootless: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v4 +# # fetch all commits instead of only the last as some branches are long lived and could have many between versions +# # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567 +# - run: git fetch --unshallow --quiet --tags --force +# - uses: actions/setup-go@v4 +# with: +# go-version-file: go.mod +# check-latest: true +# - uses: docker/setup-qemu-action@v2 +# - uses: docker/setup-buildx-action@v2 +# - name: Get cleaned branch name +# id: clean_name +# run: | +# # if main then say nightly otherwise cleanup name +# if [ "${{ github.ref }}" = "refs/heads/main" ]; then +# echo "branch=nightly" >> "$GITHUB_OUTPUT" +# exit 0 +# fi +# REF_NAME=$(echo "${{ github.ref }}" | sed -e 's/refs\/heads\///' -e 's/refs\/tags\///' -e 's/release\/v//') +# echo "branch=${REF_NAME}-nightly" >> "$GITHUB_OUTPUT" +# - name: Login to Docker Hub +# uses: docker/login-action@v2 +# with: +# username: ${{ secrets.DOCKERHUB_USERNAME }} +# password: ${{ secrets.DOCKERHUB_TOKEN }} +# - name: fetch go modules +# run: make vendor +# - name: build rootless docker image +# uses: docker/build-push-action@v4 +# with: +# context: . 
+# platforms: linux/amd64,linux/arm64 +# push: true +# file: Dockerfile.rootless +# tags: gitea/gitea:${{ steps.clean_name.outputs.branch }}-rootless diff --git a/.gitignore b/.gitignore index 53365ed0b4..93b4c8c746 100644 --- a/.gitignore +++ b/.gitignore @@ -54,7 +54,9 @@ cpu.out /dist /custom/* !/custom/conf/app.example.ini +!/custom/bin /data +/data-* /indexers /log /public/img/avatar @@ -110,3 +112,9 @@ prime/ # Manpage /man + +# Docker compose +/mysql +/data-main +/data-release +.history diff --git a/Dockerfile b/Dockerfile index b42b4daa5f..df7fed3654 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} ARG GITEA_VERSION -ARG TAGS="sqlite sqlite_unlock_notify" +ARG TAGS="sqlite sqlite_unlock_notify sqlite_json" ENV TAGS "bindata timetzdata $TAGS" ARG CGO_EXTRA_CFLAGS @@ -41,6 +41,9 @@ RUN apk --no-cache add \ su-exec \ gnupg +#For DCS local scripts +RUN apk --no-cache add jq yq + RUN addgroup \ -S -g 1000 \ git && \ diff --git a/Dockerfile-dev b/Dockerfile-dev new file mode 100644 index 0000000000..5442b88952 --- /dev/null +++ b/Dockerfile-dev @@ -0,0 +1,55 @@ +#Build stage +FROM golang:1.18-alpine3.16 + +ARG GOPROXY +ENV GOPROXY ${GOPROXY:-direct} + +ARG GITEA_VERSION +ARG TAGS="sqlite sqlite_unlock_notify sqlite_json" +ENV TAGS "bindata timetzdata $TAGS" +ARG CGO_EXTRA_CFLAGS + +#Setup repo +COPY . ${GOPATH}/src/code.gitea.io/gitea +WORKDIR ${GOPATH}/src/code.gitea.io/gitea + +EXPOSE 22 3000 80 + +RUN apk --no-cache add \ + bash \ + ca-certificates \ + curl \ + gettext \ + linux-pam \ + openssh \ + s6 \ + sqlite \ + su-exec \ + gnupg \ + make \ + build-base \ + git \ + nodejs \ + npm \ + build-base + +RUN apk add git --repository=http://dl-cdn.alpinelinux.org/alpine/v3.16/main + +RUN addgroup \ + -S -g 1000 \ + git && \ + adduser \ + -S -H -D \ + -h /data/git \ + -s /bin/bash \ + -u 1000 \ + -G git \ + git && \ + echo "git:*" | chpasswd -e + +ENV USER git +ENV GITEA_CUSTOM /data/gitea + +VOLUME ["/data"] + +CMD sh -c 'ln -s /go/src/code.gitea.io /app'; tail -f /data/gitea/log/gitea.log diff --git a/Makefile b/Makefile index c38e88a8e0..982f060945 100644 --- a/Makefile +++ b/Makefile @@ -37,7 +37,7 @@ GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0 GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1.0.1 ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.6.25 -DOCKER_IMAGE ?= gitea/gitea +DOCKER_IMAGE ?= unfoldingword/dcs DOCKER_TAG ?= latest DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG) @@ -91,7 +91,7 @@ ifneq ($(GITHUB_REF_TYPE),branch) GITEA_VERSION ?= $(VERSION) else ifneq ($(GITHUB_REF_NAME),) - VERSION ?= $(subst release/v,,$(GITHUB_REF_NAME)) + VERSION ?= $(subst release/dcs/v,,$(GITHUB_REF_NAME)) else VERSION ?= main endif @@ -139,7 +139,7 @@ TAGS ?= TAGS_SPLIT := $(subst $(COMMA), ,$(TAGS)) TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags -TEST_TAGS ?= sqlite sqlite_unlock_notify +TEST_TAGS ?= sqlite sqlite_unlock_notify sqlite_json TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(FOMANTIC_WORK_DIR)/node_modules $(DIST) $(MAKE_EVIDENCE_DIR) $(AIR_TMP_DIR) $(GO_LICENSE_TMP_DIR) @@ -976,7 +976,7 @@ generate-manpage: .PHONY: docker docker: docker build --disable-content-trust=false -t $(DOCKER_REF) . -# support also build args docker build --build-arg GITEA_VERSION=v1.2.3 --build-arg TAGS="bindata sqlite sqlite_unlock_notify" . 
+# support also build args docker build --build-arg GITEA_VERSION=v1.2.3 --build-arg TAGS="bindata sqlite sqlite_unlock_notify sqlite_json" .
 # This endif closes the if at the top of the file
 endif
diff --git a/README.md b/README.md
index aac793efe8..49bb6c7f56 100644
--- a/README.md
+++ b/README.md
@@ -3,26 +3,26 @@ Gitea

-Gitea - Git with a cup of tea
+Gitea - Git with a cup of tea - DCS Version
- - + + - - + + - - + + - - + + @@ -33,14 +33,14 @@ - + Contribute with Gitpod - - + + diff --git a/assets/lang_font_families.json b/assets/lang_font_families.json new file mode 100644 index 0000000000..8905472669 --- /dev/null +++ b/assets/lang_font_families.json @@ -0,0 +1,2132 @@ +{ + "aa": [ + "Noto Sans Ethiopic" + ], + "aae": [ + "Noto Sans Cypriot" + ], + "aat": [ + "Noto Sans Cypriot" + ], + "ab": [ + "Noto Sans Georgian" + ], + "abh": [ + "Noto Naskh Arabic" + ], + "abv": [ + "Noto Naskh Arabic" + ], + "acm": [ + "Noto Naskh Arabic" + ], + "acq": [ + "Noto Naskh Arabic" + ], + "acw": [ + "Noto Naskh Arabic" + ], + "acx": [ + "Noto Naskh Arabic" + ], + "acy": [ + "Noto Naskh Arabic" + ], + "adi": [ + "Noto Sans Tibetan" + ], + "adx": [ + "Noto Sans Tibetan" + ], + "aeb": [ + "Noto Naskh Arabic" + ], + "aec": [ + "Noto Naskh Arabic" + ], + "afb": [ + "Noto Naskh Arabic" + ], + "agj": [ + "Noto Sans Ethiopic" + ], + "ahg": [ + "Noto Sans Ethiopic" + ], + "ahk": [ + "Noto Sans Myanmar" + ], + "aij": [ + "Noto Sans Hebrew" + ], + "aio": [ + "Noto Sans Myanmar" + ], + "aiw": [ + "Noto Sans Ethiopic" + ], + "ajp": [ + "Noto Naskh Arabic" + ], + "ajt": [ + "Noto Naskh Arabic" + ], + "aju": [ + "Noto Sans Hebrew" + ], + "all": [ + "Noto Sans Malayalam" + ], + "als": [ + "Noto Sans Cypriot" + ], + "alt": [ + "Noto Sans Mongolian" + ], + "alw": [ + "Noto Sans Ethiopic" + ], + "am": [ + "Noto Sans Ethiopic" + ], + "amf": [ + "Noto Sans Ethiopic" + ], + "anr": [ + "Noto Sans Devanagari" + ], + "anu": [ + "Noto Sans Ethiopic" + ], + "apc": [ + "Noto Naskh Arabic" + ], + "apd": [ + "Noto Naskh Arabic" + ], + "aph": [ + "Noto Sans Devanagari" + ], + "ar": [ + "Noto Sans Syriac Eastern", + "Noto Sans Syriac Estrangela", + "Noto Sans Syriac Western" + ], + "arb": [ + "Noto Naskh Arabic" + ], + "arc": [ + "Noto Sans Syriac Eastern", + "Noto Sans Syriac Estrangela", + "Noto Sans Syriac Western" + ], + "arq": [ + "Noto Naskh Arabic" + ], + "ars": [ + "Noto Naskh Arabic" + ], + "ary": [ + "Noto Naskh Arabic" + ], + "arz": [ + "Noto Naskh Arabic" + ], + "as": [ + "Noto Sans Bengali" + ], + "asr": [ + "Noto Sans Devanagari" + ], + "atb": [ + "Noto Sans Lisu" + ], + "atn": [ + "Noto Naskh Arabic" + ], + "avl": [ + "Noto Naskh Arabic" + ], + "awn": [ + "Noto Sans Ethiopic" + ], + "ayl": [ + "Noto Naskh Arabic" + ], + "ayn": [ + "Noto Naskh Arabic" + ], + "ayp": [ + "Noto Naskh Arabic" + ], + "azb": [ + "Noto Naskh Arabic" + ], + "azj": [ + "Noto Naskh Arabic" + ], + "ban": [ + "Noto Sans Balinese", + "Noto Sans Javanese" + ], + "bap": [ + "Noto Sans Devanagari" + ], + "bax": [ + "Noto Sans Bamum" + ], + "bca": [ + "Noto Sans TC" + ], + "bcc": [ + "Noto Naskh Arabic" + ], + "bcq": [ + "Noto Sans Ethiopic" + ], + "bea": [ + "Noto Sans Canadian Aboriginal" + ], + "bee": [ + "Noto Sans Devanagari" + ], + "bfb": [ + "Noto Sans Devanagari" + ], + "bfq": [ + "Noto Sans Tamil" + ], + "bfs": [ + "Noto Sans TC" + ], + "bft": [ + "Noto Naskh Arabic" + ], + "bfu": [ + "Noto Sans Tibetan" + ], + "bfy": [ + "Noto Sans Devanagari" + ], + "bfz": [ + "Noto Sans Devanagari" + ], + "bgc": [ + "Noto Sans Devanagari" + ], + "bgd": [ + "Noto Sans Devanagari" + ], + "bgn": [ + "Noto Naskh Arabic" + ], + "bgp": [ + "Noto Naskh Arabic" + ], + "bgq": [ + "Noto Sans Devanagari" + ], + "bgw": [ + "Noto Sans Devanagari" + ], + "bha": [ + "Noto Sans Devanagari" + ], + "bhb": [ + "Noto Sans Devanagari" + ], + "bhd": [ + "Noto Naskh Arabic" + ], + "bhh": [ + "Noto Sans Hebrew" + ], + "bhi": [ + "Noto Sans Devanagari" + ], + "bhj": [ + 
"Noto Sans Devanagari" + ], + "bhu": [ + "Noto Sans Devanagari" + ], + "biy": [ + "Noto Sans Devanagari" + ], + "bji": [ + "Noto Sans Ethiopic" + ], + "bjj": [ + "Noto Sans Devanagari" + ], + "bjn": [ + "Noto Naskh Arabic" + ], + "bjt": [ + "Noto Naskh Arabic" + ], + "bku": [ + "Noto Sans Buhid" + ], + "bla": [ + "Noto Sans Canadian Aboriginal" + ], + "blr": [ + "Noto Sans Tai Le" + ], + "blt": [ + "Noto Sans Lao" + ], + "bm": [ + "Noto Sans NKo" + ], + "bmj": [ + "Noto Sans Devanagari" + ], + "bn": [ + "Noto Sans Bengali" + ], + "bns": [ + "Noto Sans Devanagari" + ], + "bo": [ + "Noto Sans Tibetan" + ], + "boz": [ + "Noto Naskh Arabic" + ], + "bpy": [ + "Noto Sans Bengali" + ], + "brb": [ + "Noto Sans Khmer" + ], + "brh": [ + "Noto Naskh Arabic" + ], + "bru": [ + "Noto Sans Lao" + ], + "brv": [ + "Noto Sans Lao" + ], + "brx": [ + "Noto Sans Bengali" + ], + "bsh": [ + "Noto Naskh Arabic" + ], + "bsk": [ + "Noto Naskh Arabic" + ], + "bsq": [ + "Noto Sans Bassa Vah" + ], + "bst": [ + "Noto Sans Ethiopic" + ], + "btv": [ + "Noto Sans Devanagari" + ], + "bug": [ + "Noto Sans Buginese" + ], + "bwe": [ + "Noto Sans Myanmar" + ], + "bxm": [ + "Noto Sans Mongolian" + ], + "bya": [ + "Noto Sans Batak" + ], + "byh": [ + "Noto Sans Devanagari" + ], + "byn": [ + "Noto Sans Ethiopic" + ], + "byw": [ + "Noto Sans Devanagari" + ], + "bze": [ + "Noto Naskh Arabic" + ], + "bzi": [ + "Noto Sans Thai" + ], + "caf": [ + "Noto Sans Canadian Aboriginal" + ], + "cbn": [ + "Noto Sans Thai" + ], + "ccp": [ + "Noto Sans Chakma" + ], + "cde": [ + "Noto Sans Telugu" + ], + "cdf": [ + "Noto Sans Bengali" + ], + "cdh": [ + "Noto Sans Devanagari" + ], + "cdi": [ + "Noto Sans Gujarati" + ], + "cdj": [ + "Noto Sans Devanagari" + ], + "cdm": [ + "Noto Sans Devanagari" + ], + "cdo": [ + "Noto Sans TC" + ], + "cdz": [ + "Noto Sans Bengali" + ], + "cfm": [ + "Noto Sans Bengali" + ], + "chp": [ + "Noto Sans Canadian Aboriginal" + ], + "chr": [ + "Noto Sans Cherokee" + ], + "chx": [ + "Noto Sans Devanagari" + ], + "cih": [ + "Noto Sans Devanagari" + ], + "ciw": [ + "Noto Sans Canadian Aboriginal" + ], + "cja": [ + "Noto Naskh Arabic" + ], + "cjm": [ + "Noto Naskh Arabic" + ], + "cjy": [ + "Noto Sans TC" + ], + "ckb": [ + "Noto Naskh Arabic" + ], + "clk": [ + "Noto Sans Devanagari" + ], + "cmn": [ + "Noto Sans TC" + ], + "cmo": [ + "Noto Sans Khmer" + ], + "cna": [ + "Noto Sans Tibetan" + ], + "cog": [ + "Noto Sans Thai" + ], + "cop": [ + "Noto Sans Coptic" + ], + "crj": [ + "Noto Sans Canadian Aboriginal" + ], + "crk": [ + "Noto Sans Canadian Aboriginal" + ], + "crl": [ + "Noto Sans Canadian Aboriginal" + ], + "crm": [ + "Noto Sans Canadian Aboriginal" + ], + "crx": [ + "Noto Sans Canadian Aboriginal" + ], + "csh": [ + "Noto Sans Myanmar" + ], + "csw": [ + "Noto Sans Canadian Aboriginal" + ], + "ctg": [ + "Noto Naskh Arabic" + ], + "ctn": [ + "Noto Sans Devanagari" + ], + "cuu": [ + "Noto Sans Tai Tham" + ], + "cwd": [ + "Noto Sans Canadian Aboriginal" + ], + "czh": [ + "Noto Sans TC" + ], + "daq": [ + "Noto Sans Devanagari" + ], + "dcc": [ + "Noto Naskh Arabic" + ], + "def": [ + "Noto Naskh Arabic" + ], + "der": [ + "Noto Sans Bengali" + ], + "dgl": [ + "Noto Naskh Arabic" + ], + "dgo": [ + "Noto Naskh Arabic" + ], + "dhd": [ + "Noto Sans Devanagari" + ], + "dhi": [ + "Noto Sans Devanagari" + ], + "dhn": [ + "Noto Sans Gujarati" + ], + "dho": [ + "Noto Sans Devanagari" + ], + "dis": [ + "Noto Sans Bengali" + ], + "dje": [ + "Noto Naskh Arabic" + ], + "dml": [ + "Noto Naskh Arabic" + ], + "dre": [ + "Noto Sans Tibetan" + ], + 
"drs": [ + "Noto Sans Ethiopic" + ], + "dry": [ + "Noto Sans Devanagari" + ], + "dsq": [ + "Noto Naskh Arabic" + ], + "dta": [ + "Noto Sans SC" + ], + "dty": [ + "Noto Sans Devanagari" + ], + "dub": [ + "Noto Sans Gujarati" + ], + "duh": [ + "Noto Sans Devanagari" + ], + "dus": [ + "Noto Sans Devanagari" + ], + "dv": [ + "Noto Sans Thaana" + ], + "dwr": [ + "Noto Sans Ethiopic" + ], + "dyo": [ + "Noto Naskh Arabic" + ], + "dyu": [ + "Noto Sans NKo" + ], + "dz": [ + "Noto Sans Tibetan" + ], + "eko": [ + "Noto Naskh Arabic" + ], + "el": [ + "Noto Sans Cypriot" + ], + "emg": [ + "Noto Sans Devanagari" + ], + "emk": [ + "Noto Naskh Arabic" + ], + "fa": [ + "Noto Naskh Arabic" + ], + "fay": [ + "Noto Naskh Arabic" + ], + "faz": [ + "Noto Naskh Arabic" + ], + "ff": [ + "Noto Sans Adlam" + ], + "ffm": [ + "Noto Naskh Arabic" + ], + "fia": [ + "Noto Naskh Arabic" + ], + "fub": [ + "Noto Naskh Arabic" + ], + "fuc": [ + "Noto Naskh Arabic" + ], + "fuf": [ + "Noto Sans Adlam" + ], + "fuh": [ + "Noto Naskh Arabic" + ], + "fuv": [ + "Noto Naskh Arabic" + ], + "gag": [ + "Noto Sans Cypriot" + ], + "gan": [ + "Noto Sans TC" + ], + "gas": [ + "Noto Sans Gujarati" + ], + "gau": [ + "Noto Sans Telugu" + ], + "gax": [ + "Noto Sans Ethiopic" + ], + "gaz": [ + "Noto Sans Ethiopic" + ], + "gbk": [ + "Noto Sans Devanagari" + ], + "gbl": [ + "Noto Sans Devanagari" + ], + "gbm": [ + "Noto Sans Devanagari" + ], + "gbz": [ + "Noto Naskh Arabic" + ], + "gdb": [ + "Noto Sans Telugu" + ], + "gdl": [ + "Noto Sans Ethiopic" + ], + "gez": [ + "Noto Sans Ethiopic" + ], + "ggg": [ + "Noto Naskh Arabic" + ], + "ghe": [ + "Noto Sans Devanagari" + ], + "gig": [ + "Noto Naskh Arabic" + ], + "gjk": [ + "Noto Naskh Arabic" + ], + "gju": [ + "Noto Naskh Arabic" + ], + "glk": [ + "Noto Naskh Arabic" + ], + "gmv": [ + "Noto Sans Ethiopic" + ], + "gno": [ + "Noto Sans Devanagari" + ], + "gof": [ + "Noto Sans Ethiopic" + ], + "gok": [ + "Noto Sans Devanagari" + ], + "gom": [ + "Noto Sans Devanagari" + ], + "got": [ + "Noto Sans Gothic" + ], + "gra": [ + "Noto Sans Devanagari" + ], + "grc": [ + "Noto Sans Cypriot" + ], + "grt": [ + "Noto Sans Bengali" + ], + "gru": [ + "Noto Sans Ethiopic" + ], + "gu": [ + "Noto Sans Gujarati" + ], + "guk": [ + "Noto Sans Ethiopic" + ], + "gvr": [ + "Noto Sans Devanagari" + ], + "gwc": [ + "Noto Naskh Arabic" + ], + "gwf": [ + "Noto Naskh Arabic" + ], + "gzi": [ + "Noto Naskh Arabic" + ], + "hac": [ + "Noto Naskh Arabic" + ], + "haj": [ + "Noto Sans Bengali" + ], + "hak": [ + "Noto Sans TC" + ], + "har": [ + "Noto Sans Ethiopic" + ], + "haz": [ + "Noto Naskh Arabic" + ], + "hbo": [ + "Noto Sans Hebrew" + ], + "hdy": [ + "Noto Sans Ethiopic" + ], + "he": [ + "Noto Sans Hebrew" + ], + "hi": [ + "Noto Sans Devanagari" + ], + "hif": [ + "Noto Sans Devanagari" + ], + "hlb": [ + "Noto Sans Devanagari" + ], + "hnd": [ + "Noto Naskh Arabic" + ], + "hne": [ + "Noto Sans Devanagari" + ], + "hnj": [ + "Noto Sans Thai" + ], + "hnn": [ + "Noto Sans Hanunoo" + ], + "hno": [ + "Noto Naskh Arabic" + ], + "hoc": [ + "Noto Sans Devanagari" + ], + "hoj": [ + "Noto Sans Devanagari" + ], + "how": [ + "Noto Sans TC" + ], + "hrt": [ + "Noto Sans Syriac Eastern" + ], + "hrz": [ + "Noto Naskh Arabic" + ], + "hsn": [ + "Noto Sans TC" + ], + "huy": [ + "Noto Sans Hebrew" + ], + "hy": [ + "Noto Sans Armenian" + ], + "ike": [ + "Noto Sans Canadian Aboriginal" + ], + "ikt": [ + "Noto Sans Canadian Aboriginal" + ], + "int": [ + "Noto Sans Myanmar" + ], + "iru": [ + "Noto Sans Malayalam" + ], + "itk": [ + "Noto Sans Hebrew" + ], + 
"ium": [ + "Noto Sans TC" + ], + "ja": [ + "Noto Sans JP" + ], + "jad": [ + "Noto Naskh Arabic" + ], + "jbe": [ + "Noto Sans Hebrew" + ], + "jbn": [ + "Noto Naskh Arabic" + ], + "jdg": [ + "Noto Naskh Arabic" + ], + "jdt": [ + "Noto Sans Hebrew" + ], + "jee": [ + "Noto Sans Devanagari" + ], + "jeh": [ + "Noto Sans Lao" + ], + "jje": [ + "Noto Sans Hangul" + ], + "jml": [ + "Noto Sans Devanagari" + ], + "jnl": [ + "Noto Sans Devanagari" + ], + "jns": [ + "Noto Sans Devanagari" + ], + "jpr": [ + "Noto Sans Hebrew" + ], + "jra": [ + "Noto Sans Khmer" + ], + "jrb": [ + "Noto Sans Hebrew" + ], + "jul": [ + "Noto Sans Devanagari" + ], + "jv": [ + "Noto Sans Javanese" + ], + "jya": [ + "Noto Sans Tibetan" + ], + "jye": [ + "Noto Sans Hebrew" + ], + "ka": [ + "Noto Sans Georgian" + ], + "kab": [ + "Noto Sans Tifinagh" + ], + "kaw": [ + "Noto Sans Balinese" + ], + "kbg": [ + "Noto Sans Tibetan" + ], + "kbr": [ + "Noto Sans Ethiopic" + ], + "kby": [ + "Noto Naskh Arabic" + ], + "kdh": [ + "Noto Naskh Arabic" + ], + "kdq": [ + "Noto Sans Bengali" + ], + "kdt": [ + "Noto Sans Khmer" + ], + "kex": [ + "Noto Sans Devanagari" + ], + "key": [ + "Noto Sans Telugu" + ], + "kfb": [ + "Noto Sans Devanagari" + ], + "kfc": [ + "Noto Sans Telugu" + ], + "kfe": [ + "Noto Sans Tamil" + ], + "kff": [ + "Noto Sans Devanagari" + ], + "kfg": [ + "Noto Sans Malayalam" + ], + "kfh": [ + "Noto Sans Malayalam" + ], + "kfk": [ + "Noto Sans Devanagari" + ], + "kfm": [ + "Noto Naskh Arabic" + ], + "kfp": [ + "Noto Sans Devanagari" + ], + "kfq": [ + "Noto Sans Devanagari" + ], + "kfs": [ + "Noto Sans Devanagari" + ], + "kfx": [ + "Noto Sans Devanagari" + ], + "kfy": [ + "Noto Sans Devanagari" + ], + "kgd": [ + "Noto Sans Lao" + ], + "kgj": [ + "Noto Sans Devanagari" + ], + "khb": [ + "New Tai Lue" + ], + "khf": [ + "Noto Sans Thai" + ], + "khg": [ + "Noto Sans Tibetan" + ], + "khk": [ + "Noto Sans Mongolian" + ], + "khn": [ + "Noto Sans Devanagari" + ], + "khr": [ + "Noto Sans Devanagari" + ], + "kht": [ + "Noto Sans Myanmar" + ], + "khw": [ + "Noto Naskh Arabic" + ], + "kip": [ + "Noto Sans Devanagari" + ], + "kjg": [ + "Noto Sans Lao" + ], + "kjl": [ + "Noto Sans Devanagari" + ], + "kjo": [ + "Noto Sans Devanagari" + ], + "kjp": [ + "Noto Sans Myanmar" + ], + "kjt": [ + "Noto Sans Thai" + ], + "kjz": [ + "Noto Sans Tibetan" + ], + "kkh": [ + "Noto Sans Tai Tham" + ], + "kkt": [ + "Noto Sans Devanagari" + ], + "kle": [ + "Noto Sans Devanagari" + ], + "klr": [ + "Noto Sans Devanagari" + ], + "kls": [ + "Noto Naskh Arabic" + ], + "km": [ + "Noto Sans Khmer" + ], + "kmc": [ + "Noto Sans TC" + ], + "kmr": [ + "Noto Naskh Arabic" + ], + "kmz": [ + "Noto Naskh Arabic" + ], + "kn": [ + "Noto Sans Kannada" + ], + "knc": [ + "Noto Naskh Arabic" + ], + "knn": [ + "Noto Sans Devanagari" + ], + "kns": [ + "Noto Sans Thai" + ], + "kqd": [ + "Noto Sans Syriac Eastern" + ], + "kqy": [ + "Noto Sans Ethiopic" + ], + "kra": [ + "Noto Sans Devanagari" + ], + "krv": [ + "Noto Sans Khmer" + ], + "ksw": [ + "Noto Sans Myanmar" + ], + "ksz": [ + "Noto Sans Devanagari" + ], + "ktb": [ + "Noto Sans Ethiopic" + ], + "ku": [ + "Noto Sans Armenian" + ], + "kuf": [ + "Noto Sans Lao" + ], + "kvq": [ + "Noto Sans Myanmar" + ], + "kvx": [ + "Noto Naskh Arabic" + ], + "kxc": [ + "Noto Sans Ethiopic" + ], + "kxd": [ + "Noto Naskh Arabic" + ], + "kxf": [ + "Noto Sans Myanmar" + ], + "kxl": [ + "Noto Sans Devanagari" + ], + "kxm": [ + "Noto Sans Khmer" + ], + "kxp": [ + "Noto Naskh Arabic" + ], + "kyu": [ + "Noto Sans Myanmar" + ], + "kyw": [ + "Noto Sans 
Bengali" + ], + "lad": [ + "Noto Sans Hebrew" + ], + "lae": [ + "Noto Sans Devanagari" + ], + "lax": [ + "Noto Sans Bengali" + ], + "lbc": [ + "Noto Sans Lisu" + ], + "lbf": [ + "Noto Sans Devanagari" + ], + "lbj": [ + "Noto Sans Tibetan" + ], + "lbn": [ + "Noto Sans Lao" + ], + "lbr": [ + "Noto Sans Devanagari" + ], + "lcp": [ + "Noto Sans Thai" + ], + "lep": [ + "Noto Sans Lepcha", + "Noto Sans Tibetan" + ], + "lhm": [ + "Noto Sans Devanagari" + ], + "lif": [ + "Noto Sans Limbu" + ], + "lis": [ + "Noto Sans Lisu" + ], + "lki": [ + "Noto Naskh Arabic" + ], + "lmk": [ + "Noto Sans Myanmar" + ], + "lmn": [ + "Noto Sans Devanagari" + ], + "lo": [ + "Noto Sans Lao" + ], + "lpo": [ + "Noto Sans Lisu" + ], + "lrc": [ + "Noto Naskh Arabic" + ], + "lrl": [ + "Noto Naskh Arabic" + ], + "lsa": [ + "Noto Naskh Arabic" + ], + "lsd": [ + "Noto Sans Hebrew" + ], + "luz": [ + "Noto Naskh Arabic" + ], + "lwl": [ + "Noto Sans Thai" + ], + "lwm": [ + "Noto Sans Thai" + ], + "lzz": [ + "Noto Sans Georgian" + ], + "mak": [ + "Noto Sans Buginese" + ], + "man": [ + "Noto Sans NKo" + ], + "mde": [ + "Noto Naskh Arabic" + ], + "mdr": [ + "Noto Sans Buginese" + ], + "mdx": [ + "Noto Sans Ethiopic" + ], + "mdy": [ + "Noto Sans Ethiopic" + ], + "mey": [ + "Noto Naskh Arabic" + ], + "mfa": [ + "Noto Naskh Arabic" + ], + "mfg": [ + "Noto Naskh Arabic" + ], + "mgp": [ + "Noto Sans Devanagari" + ], + "mid": [ + "Noto Sans Mandaic" + ], + "mjl": [ + "Noto Sans Devanagari" + ], + "mjt": [ + "Noto Sans Bengali" + ], + "mju": [ + "Noto Sans Telugu" + ], + "mjv": [ + "Noto Sans Malayalam" + ], + "mjz": [ + "Noto Sans Devanagari" + ], + "mkb": [ + "Noto Sans Devanagari" + ], + "mke": [ + "Noto Sans Devanagari" + ], + "mki": [ + "Noto Naskh Arabic" + ], + "mkm": [ + "Noto Sans Thai" + ], + "ml": [ + "Noto Sans Malayalam" + ], + "mlf": [ + "Noto Sans Thai" + ], + "mlq": [ + "Noto Naskh Arabic" + ], + "mmd": [ + "Noto Sans TC" + ], + "mn": [ + "Noto Sans Mongolian" + ], + "mnc": [ + "Noto Sans Mongolian" + ], + "mnh": [ + "Noto Sans Mono" + ], + "mnj": [ + "Noto Naskh Arabic" + ], + "mnk": [ + "Noto Naskh Arabic" + ], + "mnr": [ + "Noto Sans Mono" + ], + "mnw": [ + "Noto Sans Myanmar" + ], + "mpe": [ + "Noto Sans Ethiopic" + ], + "mpz": [ + "Noto Sans Thai" + ], + "mra": [ + "Noto Sans Thai" + ], + "mrd": [ + "Noto Sans Devanagari" + ], + "mrg": [ + "Noto Sans Bengali" + ], + "mrr": [ + "Noto Sans Devanagari" + ], + "mru": [ + "Noto Sans Mono" + ], + "mte": [ + "Noto Sans Mono" + ], + "mtr": [ + "Noto Sans Devanagari" + ], + "muk": [ + "Noto Sans Tibetan" + ], + "mup": [ + "Noto Sans Devanagari" + ], + "mut": [ + "Noto Sans Devanagari" + ], + "muv": [ + "Noto Sans Tamil" + ], + "muz": [ + "Noto Sans Ethiopic" + ], + "mve": [ + "Noto Naskh Arabic" + ], + "mvy": [ + "Noto Naskh Arabic" + ], + "mwt": [ + "Noto Sans Myanmar" + ], + "mww": [ + "Noto Sans Lao" + ], + "my": [ + "Noto Sans Myanmar" + ], + "mzb": [ + "Noto Naskh Arabic" + ], + "mzn": [ + "Noto Naskh Arabic" + ], + "nan": [ + "Noto Sans SC" + ], + "nbt": [ + "Noto Sans Devanagari" + ], + "ncb": [ + "Noto Sans Devanagari" + ], + "ncd": [ + "Noto Sans Devanagari" + ], + "nct": [ + "Noto Sans Bengali" + ], + "ngt": [ + "Noto Sans Lao" + ], + "nit": [ + "Noto Sans Telugu" + ], + "njz": [ + "Noto Sans Bengali" + ], + "nki": [ + "Noto Sans Bengali" + ], + "nmo": [ + "Noto Sans Bengali" + ], + "nng": [ + "Noto Sans Bengali" + ], + "nnp": [ + "Noto Sans Devanagari" + ], + "nod": [ + "Noto Sans Tai Tham" + ], + "noe": [ + "Noto Sans Devanagari" + ], + "noi": [ + "Noto Sans 
Devanagari" + ], + "npi": [ + "Noto Sans Devanagari" + ], + "nqo": [ + "Noto Sans NKo" + ], + "nsk": [ + "Noto Sans Canadian Aboriginal" + ], + "ntz": [ + "Noto Naskh Arabic" + ], + "nxq": [ + "Noto Sans Lisu" + ], + "nyq": [ + "Noto Naskh Arabic" + ], + "ojb": [ + "Noto Sans Canadian Aboriginal" + ], + "ojg": [ + "Noto Sans Canadian Aboriginal" + ], + "ojs": [ + "Noto Sans Canadian Aboriginal" + ], + "ojw": [ + "Noto Sans Canadian Aboriginal" + ], + "ola": [ + "Noto Sans Tibetan" + ], + "om": [ + "Noto Sans Ethiopic" + ], + "onp": [ + "Noto Sans Devanagari" + ], + "or": [ + "Noto Sans Oriya" + ], + "ort": [ + "Noto Sans Telugu" + ], + "oru": [ + "Noto Naskh Arabic" + ], + "ory": [ + "Noto Sans Oriya" + ], + "os": [ + "Noto Sans Georgian" + ], + "osa": [ + "Noto Sans Osage" + ], + "osi": [ + "Noto Sans Javanese" + ], + "otk": [ + "Noto Sans Old Turkic" + ], + "oui": [ + "Noto Sans Mongolian" + ], + "pa": [ + "Noto Sans Gurmukhi" + ], + "pac": [ + "Noto Sans Lao" + ], + "pal": [ + "Noto Sans Inscriptional Pahlavi" + ], + "pbt": [ + "Noto Naskh Arabic" + ], + "pbu": [ + "Noto Naskh Arabic" + ], + "pcc": [ + "Noto Sans TC" + ], + "pce": [ + "Noto Sans Myanmar" + ], + "pcg": [ + "Noto Sans Malayalam" + ], + "pci": [ + "Noto Sans Devanagari" + ], + "pcj": [ + "Noto Sans Telugu" + ], + "peo": [ + "Noto Sans Old Persian" + ], + "pes": [ + "Noto Naskh Arabic" + ], + "pgg": [ + "Noto Sans Devanagari" + ], + "phk": [ + "Noto Sans Myanmar" + ], + "phl": [ + "Noto Naskh Arabic" + ], + "phn": [ + "Noto Sans Phoenician" + ], + "pho": [ + "Noto Sans Lao" + ], + "phr": [ + "Noto Naskh Arabic" + ], + "pi": [ + "Noto Sans Myanmar", + "Noto Sans Sinhala", + "Noto Sans Thai" + ], + "pkr": [ + "Noto Sans Malayalam" + ], + "plk": [ + "Noto Naskh Arabic" + ], + "pll": [ + "Noto Sans Myanmar" + ], + "pnb": [ + "Noto Naskh Arabic" + ], + "pnt": [ + "Noto Sans Cypriot" + ], + "prd": [ + "Noto Naskh Arabic" + ], + "prs": [ + "Noto Naskh Arabic" + ], + "prx": [ + "Noto Naskh Arabic" + ], + "psi": [ + "Noto Naskh Arabic" + ], + "pst": [ + "Noto Naskh Arabic" + ], + "pum": [ + "Noto Sans Devanagari" + ], + "pwo": [ + "Noto Sans Myanmar" + ], + "pww": [ + "Noto Sans Thai" + ], + "pyu": [ + "Noto Sans TC" + ], + "qxq": [ + "Noto Naskh Arabic" + ], + "raa": [ + "Noto Sans Devanagari" + ], + "rab": [ + "Noto Sans Devanagari" + ], + "raf": [ + "Noto Sans Devanagari" + ], + "rah": [ + "Noto Sans Bengali" + ], + "rav": [ + "Noto Sans Devanagari" + ], + "rbb": [ + "Noto Sans Myanmar" + ], + "rdb": [ + "Noto Naskh Arabic" + ], + "rej": [ + "Noto Sans Rejang" + ], + "rhg": [ + "Noto Naskh Arabic" + ], + "rif": [ + "Noto Naskh Arabic" + ], + "rji": [ + "Noto Sans Devanagari" + ], + "rjs": [ + "Noto Sans Devanagari" + ], + "rki": [ + "Noto Sans Myanmar" + ], + "rkt": [ + "Noto Sans Bengali" + ], + "rmn": [ + "Noto Sans Cypriot" + ], + "rmt": [ + "Noto Naskh Arabic" + ], + "rmz": [ + "Noto Sans Myanmar" + ], + "rup": [ + "Noto Sans Cypriot" + ], + "rwr": [ + "Noto Sans Devanagari" + ], + "ryu": [ + "Noto Sans JP" + ], + "sa": [ + "Noto Sans Myanmar", + "Noto Sans Sinhala" + ], + "sam": [ + "Noto Sans Hebrew", + "Noto Sans Syriac Eastern", + "Noto Sans Syriac Estrangela", + "Noto Sans Syriac Western" + ], + "sas": [ + "Noto Sans Balinese" + ], + "sav": [ + "Noto Naskh Arabic" + ], + "saz": [ + "Noto Sans Saurashtra" + ], + "sce": [ + "Noto Naskh Arabic" + ], + "sck": [ + "Noto Sans Bengali" + ], + "scl": [ + "Noto Naskh Arabic" + ], + "scp": [ + "Noto Sans Devanagari" + ], + "scs": [ + "Noto Sans Canadian Aboriginal" + ], + "sd": [ 
+ "Noto Sans Gurmukhi" + ], + "sdh": [ + "Noto Naskh Arabic" + ], + "sdr": [ + "Noto Sans Bengali" + ], + "sek": [ + "Noto Sans Canadian Aboriginal" + ], + "sgh": [ + "Noto Naskh Arabic" + ], + "sgj": [ + "Noto Sans Devanagari" + ], + "sgr": [ + "Noto Naskh Arabic" + ], + "sgw": [ + "Noto Sans Ethiopic" + ], + "shi": [ + "Noto Naskh Arabic" + ], + "shm": [ + "Noto Naskh Arabic" + ], + "shn": [ + "Noto Sans Myanmar", + "Noto Sans Thai" + ], + "shu": [ + "Noto Naskh Arabic" + ], + "shy": [ + "Noto Naskh Arabic" + ], + "si": [ + "Noto Sans Sinhala" + ], + "sid": [ + "Noto Sans Ethiopic" + ], + "sip": [ + "Noto Sans Tibetan" + ], + "sjo": [ + "Noto Sans Mongolian" + ], + "sjp": [ + "Noto Sans Devanagari" + ], + "skr": [ + "Noto Naskh Arabic" + ], + "smp": [ + "Noto Sans Samaritan" + ], + "smy": [ + "Noto Naskh Arabic" + ], + "soa": [ + "Noto Sans Tai Viet" + ], + "sog": [ + "Noto Sans Syriac Eastern", + "Noto Sans Syriac Estrangela", + "Noto Sans Syriac Western" + ], + "sou": [ + "Noto Sans Thai" + ], + "sqi": [ + "Noto Sans Cypriot" + ], + "sqt": [ + "Noto Naskh Arabic" + ], + "srb": [ + "Noto Sans Telugu" + ], + "srx": [ + "Noto Sans Devanagari" + ], + "sss": [ + "Noto Sans Lao" + ], + "ssy": [ + "Noto Sans Ethiopic" + ], + "su": [ + "Noto Sans Javanese", + "Noto Sans Sundanese" + ], + "suq": [ + "Noto Sans Ethiopic" + ], + "suv": [ + "Noto Sans Bengali" + ], + "sva": [ + "Noto Sans Georgian" + ], + "swb": [ + "Noto Naskh Arabic" + ], + "swi": [ + "Noto Sans TC" + ], + "swv": [ + "Noto Sans Devanagari" + ], + "syc": [ + "Noto Sans Syriac Eastern", + "Noto Sans Syriac Estrangela", + "Noto Sans Syriac Western" + ], + "syl": [ + "Noto Sans Bengali" + ], + "syn": [ + "Noto Sans Syriac Eastern" + ], + "syr": [ + "Noto Sans Syriac Eastern", + "Noto Sans Syriac Estrangela", + "Noto Sans Syriac Western" + ], + "ta": [ + "Noto Sans Tamil" + ], + "taj": [ + "Noto Sans Devanagari" + ], + "taq": [ + "Noto Sans Tifinagh" + ], + "tay": [ + "Noto Sans TC" + ], + "tbw": [ + "Noto Sans Tagbanwa" + ], + "tcx": [ + "Noto Sans Tamil" + ], + "tda": [ + "Noto Naskh Arabic" + ], + "tdb": [ + "Noto Sans Bengali" + ], + "tdd": [ + "Noto Sans Tai Le" + ], + "tdg": [ + "Noto Sans Devanagari" + ], + "tdh": [ + "Noto Sans Devanagari" + ], + "te": [ + "Noto Sans Telugu" + ], + "tes": [ + "Noto Sans Javanese" + ], + "tg": [ + "Noto Sans Hebrew" + ], + "th": [ + "Noto Sans Thai" + ], + "thf": [ + "Noto Sans Devanagari" + ], + "thl": [ + "Noto Sans Devanagari" + ], + "thq": [ + "Noto Sans Devanagari" + ], + "thr": [ + "Noto Sans Devanagari" + ], + "ths": [ + "Noto Sans Devanagari" + ], + "thv": [ + "Noto Naskh Arabic" + ], + "thz": [ + "Noto Sans Tifinagh" + ], + "ti": [ + "Noto Sans Ethiopic" + ], + "tig": [ + "Noto Sans Ethiopic" + ], + "tij": [ + "Noto Sans Devanagari" + ], + "tjl": [ + "Noto Sans Myanmar" + ], + "tkb": [ + "Noto Sans Devanagari" + ], + "tks": [ + "Noto Naskh Arabic" + ], + "tkt": [ + "Noto Sans Devanagari" + ], + "tl": [ + "Noto Sans Tagalog" + ], + "tly": [ + "Noto Naskh Arabic" + ], + "tmk": [ + "Noto Sans Devanagari" + ], + "tpe": [ + "Noto Sans Bengali" + ], + "tpu": [ + "Noto Sans Khmer" + ], + "tr": [ + "Noto Sans Cypriot" + ], + "trg": [ + "Noto Sans Hebrew" + ], + "trp": [ + "Noto Sans Bengali" + ], + "tru": [ + "Noto Sans Syriac Eastern" + ], + "trw": [ + "Noto Naskh Arabic" + ], + "tsd": [ + "Noto Sans Cypriot" + ], + "tsg": [ + "Noto Naskh Arabic" + ], + "tsj": [ + "Noto Sans Tibetan" + ], + "tth": [ + "Noto Sans Lao" + ], + "tto": [ + "Noto Sans Lao" + ], + "ttq": [ + "Noto Naskh Arabic" + 
], + "tts": [ + "Noto Sans Thai" + ], + "twh": [ + "Noto Sans Tai Viet" + ], + "twm": [ + "Noto Sans Devanagari" + ], + "txo": [ + "Noto Sans Bengali" + ], + "tyr": [ + "Noto Sans Tai Viet" + ], + "tzm": [ + "Noto Naskh Arabic" + ], + "udg": [ + "Noto Sans Malayalam" + ], + "uga": [ + "Noto Sans Ugaritic" + ], + "unr": [ + "Noto Sans Bengali" + ], + "unx": [ + "Noto Sans Bengali" + ], + "ur": [ + "Noto Nastaliq Urdu" + ], + "uum": [ + "Noto Sans Cypriot" + ], + "uzn": [ + "Noto Naskh Arabic" + ], + "uzs": [ + "Noto Naskh Arabic" + ], + "vaa": [ + "Noto Sans Tamil" + ], + "vaf": [ + "Noto Naskh Arabic" + ], + "vah": [ + "Noto Sans Devanagari" + ], + "vai": [ + "Noto Sans Vai" + ], + "vas": [ + "Noto Sans Devanagari" + ], + "vav": [ + "Noto Sans Devanagari" + ], + "vkp": [ + "Noto Sans Devanagari" + ], + "wal": [ + "Noto Sans Ethiopic" + ], + "wbl": [ + "Noto Naskh Arabic" + ], + "wbq": [ + "Noto Sans Telugu" + ], + "wbr": [ + "Noto Sans Devanagari" + ], + "wlo": [ + "Noto Naskh Arabic" + ], + "wme": [ + "Noto Sans Devanagari" + ], + "wmw": [ + "Noto Naskh Arabic" + ], + "wni": [ + "Noto Naskh Arabic" + ], + "wtm": [ + "Noto Sans Devanagari" + ], + "wuu": [ + "Noto Sans TC" + ], + "xal": [ + "Noto Sans Mongolian" + ], + "xan": [ + "Noto Sans Ethiopic" + ], + "xcr": [ + "Noto Sans Carian" + ], + "xkf": [ + "Noto Sans Tibetan" + ], + "xkj": [ + "Noto Naskh Arabic" + ], + "xkz": [ + "Noto Sans Tibetan" + ], + "xlc": [ + "Noto Sans Lycian" + ], + "xld": [ + "Noto Sans Lydian" + ], + "xmf": [ + "Noto Sans Georgian" + ], + "xnr": [ + "Noto Sans Devanagari" + ], + "xpg": [ + "Noto Sans Cypriot" + ], + "xpr": [ + "Noto Sans Inscriptional Parthian" + ], + "xsl": [ + "Noto Sans Canadian Aboriginal" + ], + "xsr": [ + "Noto Sans Devanagari" + ], + "xub": [ + "Noto Sans Malayalam" + ], + "yal": [ + "Noto Naskh Arabic" + ], + "ybi": [ + "Noto Sans Devanagari" + ], + "ydd": [ + "Noto Sans Hebrew" + ], + "ydg": [ + "Noto Naskh Arabic" + ], + "yea": [ + "Noto Sans Malayalam" + ], + "yeu": [ + "Noto Sans Telugu" + ], + "yhd": [ + "Noto Sans Hebrew" + ], + "yi": [ + "Noto Sans Hebrew" + ], + "yih": [ + "Noto Sans Hebrew" + ], + "yka": [ + "Noto Naskh Arabic" + ], + "yud": [ + "Noto Sans Hebrew" + ], + "yue": [ + "Noto Sans TC" + ], + "zau": [ + "Noto Sans Tibetan" + ], + "zay": [ + "Noto Sans Ethiopic" + ], + "zch": [ + "Noto Sans TC" + ], + "zdj": [ + "Noto Naskh Arabic" + ], + "zeh": [ + "Noto Sans TC" + ], + "zen": [ + "Noto Sans Tifinagh" + ], + "zgb": [ + "Noto Sans TC" + ], + "zgh": [ + "Noto Sans Tifinagh" + ], + "zgm": [ + "Noto Sans TC" + ], + "zgn": [ + "Noto Sans TC" + ], + "zh": [ + "Noto Sans TC", + "Noto Sans SC" + ], + "zhd": [ + "Noto Sans TC" + ], + "zhn": [ + "Noto Sans TC" + ], + "zlj": [ + "Noto Sans TC" + ], + "zlm": [ + "Noto Naskh Arabic" + ], + "zln": [ + "Noto Sans TC" + ], + "zlq": [ + "Noto Sans TC" + ], + "zqe": [ + "Noto Sans TC" + ], + "zrg": [ + "Noto Sans Telugu" + ], + "zsm": [ + "Noto Naskh Arabic" + ], + "zyb": [ + "Noto Sans TC" + ], + "zyg": [ + "Noto Sans TC" + ], + "zyj": [ + "Noto Sans TC" + ], + "zyn": [ + "Noto Sans TC" + ], + "zzj": [ + "Noto Sans TC" + ] +} \ No newline at end of file diff --git a/assets/lang_font_links.json b/assets/lang_font_links.json new file mode 100644 index 0000000000..6f2330e8a7 --- /dev/null +++ b/assets/lang_font_links.json @@ -0,0 +1,76 @@ +{ + "New Tai Lue": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tifinagh:ital,wght@0,400;0,700;1,400;1,700", + "Noto Naskh Arabic": 
"https://fonts.googleapis.com/css2?family=Noto+Naskh+Arabic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Nastaliq Urdu": "https://fonts.googleapis.com/css2?family=Noto+Nastaliq+Urdu:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans": "https://fonts.googleapis.com/css2?family=Noto+Sans:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Adlam": "https://fonts.googleapis.com/css2?family=Noto+Sans+Adlam:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Armenian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Armenian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Balinese": "https://fonts.googleapis.com/css2?family=Noto+Sans+Balinese:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Bamum": "https://fonts.googleapis.com/css2?family=Noto+Sans+Bamum:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Bassa Vah": "https://fonts.googleapis.com/css2?family=Noto+Sans+Bassa+Vah:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Batak": "https://fonts.googleapis.com/css2?family=Noto+Sans+Batak:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Bengali": "https://fonts.googleapis.com/css2?family=Noto+Sans+Bengali:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Buginese": "https://fonts.googleapis.com/css2?family=Noto+Sans+Buginese:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Buhid": "https://fonts.googleapis.com/css2?family=Noto+Sans+Buhid:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Canadian Aboriginal": "https://fonts.googleapis.com/css2?family=Noto+Sans+Canadian+Aboriginal:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Carian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Carian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Chakma": "https://fonts.googleapis.com/css2?family=Noto+Sans+Chakma:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Cherokee": "https://fonts.googleapis.com/css2?family=Noto+Sans+Cherokee:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Coptic": "https://fonts.googleapis.com/css2?family=Noto+Sans+Coptic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Cypriot": "https://fonts.googleapis.com/css2?family=Noto+Sans+Cypriot:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Devanagari": "https://fonts.googleapis.com/css2?family=Noto+Sans+Devanagari:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Ethiopic": "https://fonts.googleapis.com/css2?family=Noto+Sans+Ethiopic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Georgian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Georgian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Gothic": "https://fonts.googleapis.com/css2?family=Noto+Sans+Gothic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Gujarati": "https://fonts.googleapis.com/css2?family=Noto+Sans+Gujarati:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Gurmukhi": "https://fonts.googleapis.com/css2?family=Noto+Sans+Gurmukhi:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Hangul": "https://fonts.googleapis.com/earlyaccess/notosanshangul.css", + "Noto Sans Hanunoo": "https://fonts.googleapis.com/css2?family=Noto+Sans+Hanunoo:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Hebrew": "https://fonts.googleapis.com/css2?family=Noto+Sans+Hebrew:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Inscriptional Pahlavi": "https://fonts.googleapis.com/css2?family=Noto+Sans+Inscriptional+Pahlavi:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Inscriptional Parthian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Inscriptional+Parthian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans JP": 
"https://fonts.googleapis.com/css2?family=Noto+Sans+JP:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Javanese": "https://fonts.googleapis.com/css2?family=Noto+Sans+Javanese:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Kannada": "https://fonts.googleapis.com/css2?family=Noto+Sans+Kannada:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Khmer": "https://fonts.googleapis.com/css2?family=Noto+Sans+Khmer:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Lao": "https://fonts.googleapis.com/css2?family=Noto+Sans+Lao:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Lepcha": "https://fonts.googleapis.com/css2?family=Noto+Sans+Lepcha:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Limbu": "https://fonts.googleapis.com/css2?family=Noto+Sans+Limbu:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Lisu": "https://fonts.googleapis.com/css2?family=Noto+Sans+Lisu:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Lycian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Lycian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Lydian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Lydian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Malayalam": "https://fonts.googleapis.com/css2?family=Noto+Sans+Malayalam:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Mandaic": "https://fonts.googleapis.com/css2?family=Noto+Sans+Mandaic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Mongolian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Mongolian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Mono": "https://fonts.googleapis.com/css2?family=Noto+Sans+Mono:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Myanmar": "https://fonts.googleapis.com/css2?family=Noto+Sans+Myanmar:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans NKo": "https://fonts.googleapis.com/earlyaccess/notosansnko.css", + "Noto Sans Old Persian": "https://fonts.googleapis.com/css2?family=Noto+Sans+Old+Persian:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Old Turkic": "https://fonts.googleapis.com/css2?family=Noto+Sans+Old+Turkic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Oriya": "https://fonts.googleapis.com/css2?family=Noto+Sans+Oriya:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Osage": "https://fonts.googleapis.com/css2?family=Noto+Sans+Osage:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Phoenician": "https://fonts.googleapis.com/css2?family=Noto+Sans+Phoenician:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Rejang": "https://fonts.googleapis.com/css2?family=Noto+Sans+Rejang:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans SC": "https://fonts.googleapis.com/css2?family=Noto+Sans+SC:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Samaritan": "https://fonts.googleapis.com/css2?family=Noto+Sans+Samaritan:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Saurashtra": "https://fonts.googleapis.com/css2?family=Noto+Sans+Saurashtra:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Sinhala": "https://fonts.googleapis.com/css2?family=Noto+Sans+Sinhala:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Sundanese": "https://fonts.googleapis.com/css2?family=Noto+Sans+Sundanese:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Syriac Eastern": "https://fonts.googleapis.com/earlyaccess/notosanssyriaceastern.css", + "Noto Sans Syriac Estrangela": "https://fonts.googleapis.com/earlyaccess/notosanssyriacestrangela.css", + "Noto Sans Syriac Western": "https://fonts.googleapis.com/earlyaccess/notosanssyriacwestern.css", + "Noto Sans TC": "https://fonts.googleapis.com/css2?family=Noto+Sans+TC:ital,wght@0,400;0,700;1,400;1,700", + 
"Noto Sans Tagalog": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tagalog:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Tagbanwa": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tagbanwa:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Tai Le": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tai+Le:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Tai Tham": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tai+Tham:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Tai Viet": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tai+Viet:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Tamil": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tamil:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Telugu": "https://fonts.googleapis.com/css2?family=Noto+Sans+Telugu:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Thaana": "https://fonts.googleapis.com/css2?family=Noto+Sans+Thaana:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Thai": "https://fonts.googleapis.com/css2?family=Noto+Sans+Thai:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Tibetan": "https://fonts.googleapis.com/earlyaccess/notosanstibetan.css", + "Noto Sans Tifinagh": "https://fonts.googleapis.com/css2?family=Noto+Sans+Tifinagh:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Ugaritic": "https://fonts.googleapis.com/css2?family=Noto+Sans+Ugaritic:ital,wght@0,400;0,700;1,400;1,700", + "Noto Sans Vai": "https://fonts.googleapis.com/css2?family=Noto+Sans+Vai:ital,wght@0,400;0,700;1,400;1,700" +} \ No newline at end of file diff --git a/assets/logo.svg b/assets/logo.svg index 9df6b83b56..c643bc9da2 100644 --- a/assets/logo.svg +++ b/assets/logo.svg @@ -1,31 +1 @@ - - - - - - - - - - - - + \ No newline at end of file diff --git a/cmd/door43metadata.go b/cmd/door43metadata.go new file mode 100644 index 0000000000..43e739b418 --- /dev/null +++ b/cmd/door43metadata.go @@ -0,0 +1,80 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package cmd + +import ( + "fmt" + + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/storage" + door43metadata_service "code.gitea.io/gitea/services/door43metadata" + + "github.com/urfave/cli/v2" +) + +// CmdDoor43Metadata represents the available door43metadata sub-command. +var CmdDoor43Metadata = &cli.Command{ + Name: "door43metadata", + Usage: "Scan repo(s) for the Door43 Metadata", + Description: "A command to update all repos or a repo's Door43 Metadata", + Action: runDoor43Metadata, + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "owner", + Aliases: []string{"o"}, + Value: "", + Usage: `Name of a the owner of the repo (see repo argument) to generate the door43metadata. "repo" must be set as well`, + }, + &cli.StringFlag{ + Name: "repo", + Aliases: []string{"r"}, + Value: "", + Usage: `Name of a single repo to generate the door43metadata. 
"owner" must also be set for this to be accepted`, + }, + }, +} + +func runDoor43Metadata(ctx *cli.Context) error { + ownerName := ctx.String("owner") + repoName := ctx.String("repo") + if ownerName != "" && repoName == "" { + return fmt.Errorf("--repo(-r) must be specified if --owner(-o) is used") + } + if ownerName == "" && repoName != "" { + return fmt.Errorf("--owner(-o) must be supplied if --repo(-r) is used") + } + + stdCtx, cancel := installSignals() + defer cancel() + + if err := initDB(stdCtx); err != nil { + return err + } + + if err := storage.Init(); err != nil { + return err + } + + if ownerName != "" && repoName != "" { + repo, err := repo_model.GetRepositoryByOwnerAndName(stdCtx, ownerName, repoName) + if err != nil { + return err + } + return door43metadata_service.ProcessDoor43MetadataForRepo(stdCtx, repo, "") + } + + err := door43metadata_service.UpdateDoor43Metadata(stdCtx) + if err != nil { + return err + } + + if repoName != "" { + log.Info("Finished gathering the door43metadata for %s/%s", ownerName, repoName) + } else { + log.Info("Finished gathering the door43metadaa for all repos") + } + + return nil +} diff --git a/cmd/main.go b/cmd/main.go index feda41e68b..dfb366c00b 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -141,6 +141,7 @@ func NewMainApp(version, versionExtra string) *cli.App { CmdMigrateStorage, CmdDumpRepository, CmdRestoreRepository, + CmdDoor43Metadata, CmdActions, cmdHelp(), // the "help" sub-command was used to show the more information for "work path" and "custom config" } diff --git a/custom/.gitignore b/custom/.gitignore new file mode 100644 index 0000000000..7ac6d1c7cc --- /dev/null +++ b/custom/.gitignore @@ -0,0 +1 @@ +app.ini diff --git a/custom/bin/README.md b/custom/bin/README.md new file mode 100644 index 0000000000..03455685b6 --- /dev/null +++ b/custom/bin/README.md @@ -0,0 +1,18 @@ +manage hashtags in gitea database. + +The following files are part of the application +# Test +- test.sh - run update by hand on a single repo +- tst + +# Production +- githook-hashtag.conf - server config values. These are changed when server environment changes. +- update - Upon a push to a ubn repository, remove all its hashtags and regenerate them by +parsing all .md files. This is started by gitea. Its name cannot change. + +# Normal Operation + +push a UBN repo to git. UBN repos have -ubn or -ubn- as part of their name. + or +sh test.sh + diff --git a/custom/bin/build.sh b/custom/bin/build.sh new file mode 100755 index 0000000000..301c18e493 --- /dev/null +++ b/custom/bin/build.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash + +set -e +set -x + +RELEASES_DIR=/home/git/releases +GITEA_REPO=https://github.com/unfoldingWord-dev/dcs.git + +version=$1 # THIS NEEDS TO BE THE VERSION WE ARE MAKING WITHOUT the "v", e.g. 
1.0.0 + +# MAKE A TEMP go DIRECTORY +cd $(mktemp -d ~/tmp/go-XXXX) + +# SET GO PATHS FOR COMPILING +export GOPATH=$(pwd) +export PATH=/usr/local/go/bin:$GOPATH/bin:$PATH + +# COMPILE GITEA FROM OUR GITEA_REPO +go get -d -u code.gitea.io/gitea +cd src/code.gitea.io +rm -rf gitea +git clone --branch master ${GITEA_REPO} gitea +cd gitea +TAGS="bindata" make generate build + +# SET GITEA PATH +export GITEA_PATH=${GOPATH}/src/code.gitea.io/gitea + +# MAKE THE RELEASE DIR +rm -rf ${RELEASES_DIR}/${version} +RELEASE_PATH=${RELEASES_DIR}/${version}/gitea +mkdir -p ${RELEASE_PATH} + +# COPY IN gitea and make custom dir from $CUSTOM_REPO +cp ${GITEA_PATH}/gitea ${RELEASE_PATH} +cp -r ${GITEA_PATH}/custom ${RELEASE_PATH} + +# TAR IT UP +tar -cvzf ${RELEASES_DIR}/linux_amd64_${version}.tar.gz -C ${RELEASES_DIR}/${version} gitea + diff --git a/custom/bin/githook-hashtag-deploy.sh b/custom/bin/githook-hashtag-deploy.sh new file mode 100755 index 0000000000..5a72e4166e --- /dev/null +++ b/custom/bin/githook-hashtag-deploy.sh @@ -0,0 +1,89 @@ +#/bin/sh +######################################################################## +# +# NAME githook-hashstag-deploy.sh - Place copy of update into hook dir +# +# DESCRIPTION Since "update" is a generic script, this deployment script +# will not overwrite someone elses work. +# +######################################################################## + +opt= +over=no + +while [ $# -gt 0 ] ; do + case $1 in + -d) opt="$opt -d" ; set -x ;; + -o) opt="$opt -o" ; over=yes ;; + -h) echo "Usage: $USAGE" ;; + *) + echo "Extra argument:" $1 + echo "Usage: $USAGE" + exit 1 + ;; + esac + + shift +done + +. ./githook-hashtag.conf + +del=" " + +myLog() { + echo $* + dte=$(date +"%Y-%m-%d_%H:%M:%S") + echo "$dte $*" >> $logFile/githook-hashtag-deploy.log +} + +plural() { + rep="s" + + if [ $1 -eq 1 ] ; then + rep="" + fi + + echo -n $rep +} + + +bmsql() { + psql --dbname=$dbname --no-align --tuples-only --field-separator="$del" < $reps + +lines=$(wc -l < $reps) +count=0 + +if [ $lines -lt 1 ] ; then + myLog "Warning: No ubn repositories detected." +else + while read user repo ; do + tgt=$repos/$user/${repo}.git/hooks + + if [ -f $tgt/update ] && [ $over = "no" ] ; then + myLog "Warning: $tgt/update already exists. Will not overwrite." + else + cp update $repos/$user/${repo}.git/hooks + count=$(( count + 1 )) + fi + done < $reps +fi + + +myLog "Updated $count repo$(plural $count)" + + +if [ $count -ne $lines ] ; then + dif=$(( $lines - $count )) + + myLog "Could not update $dif repo$(plural $dif)." +fi + +sudo rm -f $reps + diff --git a/custom/bin/githook-hashtag.conf b/custom/bin/githook-hashtag.conf new file mode 100644 index 0000000000..f95bfcb282 --- /dev/null +++ b/custom/bin/githook-hashtag.conf @@ -0,0 +1,13 @@ +# config info for githook-hashtag + +readIni() { + section=$1 + attribute=$2 + + grep -A 1000 $section $ini | grep -m 1 $attribute | sed -e 's/.*= *//' +} + + +repos=$( readIni repository ROOT) # where are repos on this server +dbname=$( readIni database NAME) # gogs database name +logFile=$(readIni log ROOT_PATH) # log to diff --git a/custom/bin/install.sh b/custom/bin/install.sh new file mode 100755 index 0000000000..97d2407043 --- /dev/null +++ b/custom/bin/install.sh @@ -0,0 +1,20 @@ +#/bin/sh +################################################################# +# +# Put parts in right places +# +################################################################# + +. 
./githook-hashtag.conf + +myLog() { + echo $* + dte=$(date +s"%Y-%m-%d_%H:%M:%s") + echo "$dte $*" >> $logFile +} + +myLog "Install githook-hashtag." + +sudo cp githook-hashtag.conf /etc +sudo cp githook-hashtag-deploy.sh $appDir + diff --git a/custom/bin/post-update b/custom/bin/post-update new file mode 100644 index 0000000000..ec54053406 --- /dev/null +++ b/custom/bin/post-update @@ -0,0 +1,124 @@ +#!/bin/sh +########################################################################### +# +# NAME update - update hashtag table after push of repo +# +# DESCRIPTION Called from gitea githook this script parses hashtags from +# repo and places them in hashtag table for git +# +########################################################################### + +# Disable update + exit 0 + +refName=$1 +oldRev=$2 +newRev=$3 +ini=$4 + + +readIni() { + # config info for githook-hashtag + # ini path must be set before calling + section=$1 + attribute=$2 + + grep -A 1000 $section $ini | grep -m 1 $attribute | sed -e 's/.*= *//' +} + + +bmsql() { + # call postgress with "csv" like output and no headers + psql --dbname=$dbname --no-align --tuples-only --field-separator=" " <> $logFile/githook-hashtag-update.log +} + + +if [ x$refName = x-d ] ; then + set -x +fi + +# parameters from gogs app.ini + repos=$( readIni repository ROOT) # where are repos on this server + dbname=$( readIni database NAME) # gogs database name + logFile=$(readIni log ROOT_PATH) # log to + +# what repo am I + wd=$(pwd) + rwd=${wd%/hooks} + repo=${rwd#*repositories/} + noExt=${repo%.git} + usr=${repo%/*} + rep=${noExt#*/} + + case $rep in + *-ubn|*-ubn-*) true ;; + *) + myLog "Error: $rep is not a UBN repo" + exit 0 + ;; + esac + + myLog "Update: Updating Hashtags $noExt $*" + + lang=${rep%%-*} + + +# make a place for temp repo and other intermediate files + b="/tmp/" + tmp=$(mktemp -u repo-XXXXXX) + files=$b$(mktemp -u files-XXXXXX).txt + tags=$b$(mktemp -u tags-XXXXXX).txt + inserts=$b$(mktemp -u inserts-XXXXXX).sql + sorted=$b$(mktemp -u sorted-XXXXXX).sql + +# clone repo + cd /tmp + git clone $rwd $tmp + cd $tmp + dateStamp=$(date +"%s") + +# get user id and repo id + userId=$(bmsql "SELECT id FROM \"user\" WHERE lower_name = '$usr'") + repoId=$(bmsql "SELECT id FROM repository WHERE lower_name = '$rep' and owner_id = $userId ") + +# delete all tags for repo pages + bmsql "DELETE FROM hashtag WHERE user_id = $userId AND repo_id = $repoId" + +# get all pages in repo with markdown + > $inserts + find ./ -name '*.md' | grep -v '/.git' > $files + + while read fle ; do + filePath=${fle#*/} + + # get hashtags from page observing the following rules + # 1) only lines with # as first character + # 2) No double ## hashtags + # 3) No space after hash character + # 4) No verse markers "#v" + + grep '^#' $fle | grep -v '^##' | grep -v '^# ' | grep -v '^#v' | sed -e 's/^#//' > $tags + + while read tagName ; do + echo "INSERT INTO hashtag(user_id, repo_id, lang, tag_name, file_path, created_unix ) VALUES( $userId, $repoId, '$lang', '$tagName', '$filePath', $dateStamp );" >> $inserts + done < $tags + done < $files + +# write to db + sort -u < $inserts > $sorted + psql --dbname=$dbname -f $sorted + +rm -rf $b$tmp +rm -f $files $tags $inserts $sorted +exit 0 + diff --git a/custom/bin/test.sh b/custom/bin/test.sh new file mode 100644 index 0000000000..34be265f70 --- /dev/null +++ b/custom/bin/test.sh @@ -0,0 +1,45 @@ +#!/bin/sh + +USAGE="sh test.sh [-h] [-d] [-u ] [-r ] +where + -d - debug Turn on sh tracing + -h - Help This text + -o - override 
write of update script + -u - owner of repo + -r - name of ubn repo +" + +. /etc/githook-hashtag.conf + +myLog() { + echo $* + dte=$(date +s"%Y-%m-%d_%H:%M:%s") + echo "$dte $*" >> $logFile +} + + +user=bms +repo=en-ubn-act +opt= + +while [ $# -gt 0 ] ; do + case $1 in + -u) user=$2 ; shift ;; + -r) repo=$2 ; shift ;; + -d) opt="$opt -d" ;; + -o) opt="$opt -o" ;; + -h) echo "Usage: $USAGE" ;; + *) + echo "Extra argument:" $1 + echo "Usage: $USAGE" + exit 1 + ;; + esac + + shift +done + + +sh $appDir/githook-hashtag-deploy.sh $opt +sh $repos/$user/$repo.git/hooks/update $opt + diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index 2209822ff0..3244f3e82a 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -2291,6 +2291,14 @@ LEVEL = Info ;; Set the maximum number of characters in a mermaid source. (Set to -1 to disable limits) ;MERMAID_MAX_SOURCE_CHARACTERS = 5000 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[dcs] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Door43 Preivew URL used for the Preview tab of every repo page +;DOOR43_PREIVEW_URL = https://door43.org + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;[markup.sanitizer.1] diff --git a/docker-compose.develop.yml b/docker-compose.develop.yml new file mode 100644 index 0000000000..38150a4746 --- /dev/null +++ b/docker-compose.develop.yml @@ -0,0 +1,52 @@ +version: "3" + +services: + dcs: + container_name: dcs + image: dcs-dev:latest + build: + context: . + dockerfile: Dockerfile-dev + user: "${UID}:${GID}" + environment: + - USER_UID="${UID}" + - USER_GID="${GID}" + - GITEA__database__DB_TYPE=mysql + - GITEA__database__HOST=db:3306 + - GITEA__database__NAME=gitea-release + - GITEA__database__USER=gitea + - GITEA__database__PASSWD=gitea + - TAGS=bindata sqlite sqlite_unlock_notify sqlite_json + restart: always + networks: + - gitea + volumes: + - /etc/timezone:/etc/timezone:ro + - /etc/localtime:/etc/localtime:ro + - ./data-release:/data + - .:/go/src/code.gitea.io/gitea + ports: + - "${DCS_PORT-3000}:80" + - "222:22" + depends_on: + - db + stdin_open: true # docker run -i + tty: true # docker run -t + + db: + platform: linux/x86_64 + image: mysql:5.7 + restart: always + environment: + - MYSQL_ROOT_PASSWORD=gitea + - MYSQL_USER=gitea + - MYSQL_PASSWORD=gitea + - MYSQL_DATABASE=gitea-release + networks: + - gitea + volumes: + - ./mysql:/var/lib/mysql + +networks: + gitea: + external: false diff --git a/docker-compose.main.yml b/docker-compose.main.yml new file mode 100644 index 0000000000..111cb453f5 --- /dev/null +++ b/docker-compose.main.yml @@ -0,0 +1,45 @@ +version: "3" + +services: + dcs: + container_name: dcs + build: . 
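+    # the image below is built from the local Dockerfile in this checkout and tagged dcs-local:main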
+ image: dcs-local:main + environment: + - USER_UID=1000 + - USER_GID=1000 + - GITEA__database__DB_TYPE=mysql + - GITEA__database__HOST=db:3306 + - GITEA__database__NAME=gitea-main + - GITEA__database__USER=gitea + - GITEA__database__PASSWD=gitea + restart: always + networks: + - gitea + volumes: + - /etc/timezone:/etc/timezone:ro + - /etc/localtime:/etc/localtime:ro + - ./data-main:/data + ports: + - "${DCS_PORT-3000}:80" + - "222:22" + depends_on: + - db + + db: + platform: linux/x86_64 + image: mysql:5.7 + restart: always + environment: + - MYSQL_ROOT_PASSWORD=gitea + - MYSQL_USER=gitea + - MYSQL_PASSWORD=gitea + - MYSQL_DATABASE=gitea-main + networks: + - gitea + volumes: + - ./mysql:/var/lib/mysql + +networks: + gitea: + external: false diff --git a/docker-compose.release.yml b/docker-compose.release.yml new file mode 100644 index 0000000000..013dc25f07 --- /dev/null +++ b/docker-compose.release.yml @@ -0,0 +1,45 @@ +version: "3" + +services: + dcs: + container_name: dcs + build: . + image: dcs-local:release + environment: + - USER_UID=1000 + - USER_GID=1000 + - GITEA__database__DB_TYPE=mysql + - GITEA__database__HOST=db:3306 + - GITEA__database__NAME=gitea-release + - GITEA__database__USER=gitea + - GITEA__database__PASSWD=gitea + restart: always + networks: + - gitea + volumes: + - /etc/timezone:/etc/timezone:ro + - /etc/localtime:/etc/localtime:ro + - ./data-release:/data + ports: + - "${DCS_PORT-3000}:80" + - "222:22" + depends_on: + - db + + db: + platform: linux/x86_64 + image: mysql:5.7 + restart: always + environment: + - MYSQL_ROOT_PASSWORD=gitea + - MYSQL_USER=gitea + - MYSQL_PASSWORD=gitea + - MYSQL_DATABASE=gitea-release + networks: + - gitea + volumes: + - ./mysql:/var/lib/mysql + +networks: + gitea: + external: false diff --git a/docker/manifest.rootless.tmpl b/docker/manifest.rootless.tmpl index 1ebf5b73c8..6ab6fccf54 100644 --- a/docker/manifest.rootless.tmpl +++ b/docker/manifest.rootless.tmpl @@ -1,4 +1,4 @@ -image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/v" build.ref)}}{{trimPrefix "refs/heads/release/v" build.ref}}-{{/if}}nightly{{/if}}-rootless +image: unfoldingword/dcs:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/dcs/v" build.ref)}}{{trimPrefix "refs/heads/release/dcs/v" build.ref}}-{{/if}}nightly{{/if}}-rootless {{#if build.tags}} {{#unless (contains "-rc" build.tag)}} {{#unless (contains "-dev" build.tag)}} @@ -12,12 +12,12 @@ tags: {{/if}} manifests: - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/v" build.ref)}}{{trimPrefix "refs/heads/release/v" build.ref}}-{{/if}}nightly{{/if}}-linux-amd64-rootless + image: unfoldingword/dcs:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/dcs/v" build.ref)}}{{trimPrefix "refs/heads/dcs/v" build.ref}}-{{/if}}nightly{{/if}}-linux-amd64-rootless platform: architecture: amd64 os: linux - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/v" build.ref)}}{{trimPrefix "refs/heads/release/v" build.ref}}-{{/if}}nightly{{/if}}-linux-arm64-rootless + image: unfoldingword/dcs:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/dcs/v" build.ref)}}{{trimPrefix "refs/heads/release/dcs/v" build.ref}}-{{/if}}nightly{{/if}}-linux-arm64-rootless platform: architecture: arm64 os: linux diff --git a/docker/manifest.tmpl b/docker/manifest.tmpl 
index 08ccf61b57..200590a5ce 100644 --- a/docker/manifest.tmpl +++ b/docker/manifest.tmpl @@ -1,4 +1,4 @@ -image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/v" build.ref)}}{{trimPrefix "refs/heads/release/v" build.ref}}-{{/if}}nightly{{/if}} +image: unfoldingword/dcs:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/dcs/v" build.ref)}}{{trimPrefix "refs/heads/release/dcs/v" build.ref}}-{{/if}}nightly{{/if}} {{#if build.tags}} {{#unless (contains "-rc" build.tag)}} {{#unless (contains "-dev" build.tag)}} @@ -12,12 +12,12 @@ tags: {{/if}} manifests: - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/v" build.ref)}}{{trimPrefix "refs/heads/release/v" build.ref}}-{{/if}}nightly{{/if}}-linux-amd64 + image: unfoldingword/dcs:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/dcs/v" build.ref)}}{{trimPrefix "refs/heads/release/dcs/v" build.ref}}-{{/if}}nightly{{/if}}-linux-amd64 platform: architecture: amd64 os: linux - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/v" build.ref)}}{{trimPrefix "refs/heads/release/v" build.ref}}-{{/if}}nightly{{/if}}-linux-arm64 + image: unfoldingword/dcs:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}{{#if (hasPrefix "refs/heads/release/dcs/v" build.ref)}}{{trimPrefix "refs/heads/release/dcs/v" build.ref}}-{{/if}}nightly{{/if}}-linux-arm64 platform: architecture: arm64 os: linux diff --git a/docs/content/administration/config-cheat-sheet.en-us.md b/docs/content/administration/config-cheat-sheet.en-us.md index 617715fbaa..9043cf03f9 100644 --- a/docs/content/administration/config-cheat-sheet.en-us.md +++ b/docs/content/administration/config-cheat-sheet.en-us.md @@ -1412,3 +1412,7 @@ Like `uses: https://gitea.com/actions/checkout@v3` or `uses: http://your-git-ser - `SHOW_FOOTER_TEMPLATE_LOAD_TIME`: **true**: Show time of template execution in the footer. - `ENABLE_SITEMAP`: **true**: Generate sitemap. - `ENABLE_FEED`: **true**: Enable/Disable RSS/Atom feed. + +## DCS (`dcs`) + +- `DOOR43_PREVIEW_URL`: **https://door43.org**: Door43 Preview URL, the base URL of the site that serves the previews. Do not include a trailing slash or any path.
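As a rough illustration of the new `[dcs]` settings section documented above, the sketch below shows how a small external tool could read `DOOR43_PREVIEW_URL` from an `app.ini` using `gopkg.in/ini.v1` (promoted to a direct dependency in the `go.mod` change that follows). This is not the DCS implementation itself; inside Gitea the value would be read through the `setting` package, and the `app.ini` path and trailing-slash trimming here are assumptions for the example.

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/ini.v1"
)

func main() {
	// Load the instance configuration; the path is illustrative.
	cfg, err := ini.Load("custom/conf/app.ini")
	if err != nil {
		fmt.Println("could not load app.ini:", err)
		return
	}
	// Fall back to the documented default when the key is absent.
	previewURL := cfg.Section("dcs").Key("DOOR43_PREVIEW_URL").MustString("https://door43.org")
	// The cheat sheet asks for no trailing slash or path, so trim defensively.
	previewURL = strings.TrimRight(previewURL, "/")
	fmt.Println("Door43 preview base URL:", previewURL)
}
```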
diff --git a/go.mod b/go.mod index a3b4656f76..ab66eba572 100644 --- a/go.mod +++ b/go.mod @@ -117,6 +117,7 @@ require ( google.golang.org/protobuf v1.31.0 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df gopkg.in/ini.v1 v1.67.0 + gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 mvdan.cc/xurls/v2 v2.5.0 strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 @@ -297,7 +298,6 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a // indirect gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect ) replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1 diff --git a/main.go b/main.go index 775c729c56..f2528f8402 100644 --- a/main.go +++ b/main.go @@ -20,6 +20,7 @@ import ( _ "code.gitea.io/gitea/modules/markup/csv" _ "code.gitea.io/gitea/modules/markup/markdown" _ "code.gitea.io/gitea/modules/markup/orgmode" + _ "code.gitea.io/gitea/modules/markup/tsv" // DCS Customizations "github.com/urfave/cli/v2" ) diff --git a/models/catalog_list.go b/models/catalog_list.go new file mode 100644 index 0000000000..f390edeed9 --- /dev/null +++ b/models/catalog_list.go @@ -0,0 +1,116 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package models + +import ( + "context" + "fmt" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/door43metadata" + "code.gitea.io/gitea/models/repo" + + "xorm.io/builder" +) + +// SearchCatalog returns catalog repositories based on search options, +// it returns results in given range and number of total results. +func SearchCatalog(ctx context.Context, opts *door43metadata.SearchCatalogOptions) (repo.Door43MetadataList, int64, error) { + cond := door43metadata.SearchCatalogCondition(opts) + return SearchCatalogByCondition(ctx, opts, cond) +} + +// SearchCatalogByCondition search repositories by condition +func SearchCatalogByCondition(ctx context.Context, opts *door43metadata.SearchCatalogOptions, cond builder.Cond) (repo.Door43MetadataList, int64, error) { + if opts.Page <= 0 { + opts.Page = 1 + } + if opts.PageSize < 0 { + opts.PageSize = 0 + } + + if len(opts.OrderBy) == 0 { + opts.OrderBy = []door43metadata.CatalogOrderBy{door43metadata.CatalogOrderByNewest} + } + + var dms repo.Door43MetadataList + if opts.PageSize > 0 { + dms = make(repo.Door43MetadataList, 0, opts.PageSize) + } + + releaseInfoInner, err := builder.Select("`door43_metadata`.repo_id", "COUNT(*) AS release_count", "MAX(`door43_metadata`.release_date_unix) AS latest_unix"). + From("door43_metadata"). + GroupBy("`door43_metadata`.repo_id"). + Where(builder.Gt{"`door43_metadata`.release_date_unix": 0}). + Where(door43metadata.GetStageCond(opts.Stage)). + ToBoundSQL() + if err != nil { + return nil, 0, err + } + + releaseInfoOuter, err := builder.Select("`door43_metadata`.repo_id", "MAX(release_count) AS release_count", "MAX(latest_unix) AS latest_unix", "MIN(stage) AS latest_stage"). + From("door43_metadata"). + Join("INNER", "("+releaseInfoInner+") release_info_inner", "`release_info_inner`.repo_id = `door43_metadata`.repo_id AND `door43_metadata`.release_date_unix = `release_info_inner`.latest_unix"). + GroupBy("`door43_metadata`.repo_id"). + ToBoundSQL() + if err != nil { + return nil, + 0, err + } + + sess := db.GetEngine(db.DefaultContext). + Join("INNER", "repository", "`repository`.id = `door43_metadata`.repo_id"). 
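+ // the remaining joins pull in the owning user, the (optional) release, and the release_info subquery so the search condition and ORDER BY columns can reference them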
+ Join("INNER", "user", "`repository`.owner_id = `user`.id"). + Join("LEFT", "release", "`release`.id = `door43_metadata`.release_id"). + Join("INNER", "("+releaseInfoOuter+") release_info", "release_info.repo_id = `door43_metadata`.repo_id"). + Where(cond) + + for _, orderBy := range opts.OrderBy { + sess.OrderBy(orderBy.String()) + } + + if opts.PageSize > 0 || opts.Page > 1 { + sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + } + count, err := sess.FindAndCount(&dms) + if err != nil { + return nil, 0, fmt.Errorf("FindAndCount: %v", err) + } + + if err = dms.LoadAttributes(ctx); err != nil { + return nil, 0, fmt.Errorf("LoadAttributes: %v", err) + } + + return dms, count, nil +} + +// SearchDoor43MetadataField returns door43metadat field based on search options +func SearchDoor43MetadataField(ctx context.Context, opts *door43metadata.SearchCatalogOptions, field string) ([]string, error) { + cond := door43metadata.SearchCatalogCondition(opts) + return SearchDoor43MetadataFieldByCondition(ctx, opts, cond, field) +} + +// SearchDoor43MetadataFieldByCondition search door43metadata entries by condition for a single field +func SearchDoor43MetadataFieldByCondition(ctx context.Context, opts *door43metadata.SearchCatalogOptions, cond builder.Cond, field string) ([]string, error) { + var results []string + + if !strings.Contains(field, ".") { + field = "`door43_metadata`." + field + } + + sess := db.GetEngine(db.DefaultContext).Table("door43_metadata"). + Select("DISTINCT "+field). + Join("INNER", "repository", "`repository`.id = `door43_metadata`.repo_id"). + Join("INNER", "user", "`repository`.owner_id = `user`.id"). + Where(cond). + OrderBy(field) + + err := sess.Find(&results) + if err != nil { + return nil, fmt.Errorf("find: %v", err) + } + + return results, nil +} diff --git a/models/door43metadata/search.go b/models/door43metadata/search.go new file mode 100644 index 0000000000..710132dab6 --- /dev/null +++ b/models/door43metadata/search.go @@ -0,0 +1,326 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package door43metadata + +import ( + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/util" + + "xorm.io/builder" +) + +// CatalogOrderBy is used to sort the result +type CatalogOrderBy string + +func (s CatalogOrderBy) String() string { + return string(s) +} + +// Strings for sorting result +const ( + CatalogOrderByTitle CatalogOrderBy = "`door43_metadata`.title ASC" + CatalogOrderByTitleReverse CatalogOrderBy = "`door43_metadata`.title DESC" + CatalogOrderBySubject CatalogOrderBy = "`door43_metadata`.subject ASC" + CatalogOrderBySubjectReverse CatalogOrderBy = "`door43_metadata`.subject DESC" + CatalogOrderByResource CatalogOrderBy = "`door43_metadata`.resource ASC" + CatalogOrderByResourceReverse CatalogOrderBy = "`door43_metadata`.resource DESC" + CatalogOrderByRepoName CatalogOrderBy = "`repository`.lower_name ASC" + CatalogOrderByRepoNameReverse CatalogOrderBy = "`repository`.lower_name DESC" + CatalogOrderByTag CatalogOrderBy = "`door43_metadata`.ref ASC" + CatalogOrderByTagReverse CatalogOrderBy = "`door43_metadata`.ref DESC" + CatalogOrderByReleaseDate CatalogOrderBy = "`door43_metadata`.ref ASC" + CatalogOrderByReleaseDateReverse CatalogOrderBy = "`door43_metadata`.ref DESC" + CatalogOrderByLangCode CatalogOrderBy = "`door43_metadata`.language ASC" + CatalogOrderByLangCodeReverse CatalogOrderBy = "`door43_metadata`.language DESC" + CatalogOrderByOldest CatalogOrderBy = "`door43_metadata`.release_date_unix ASC" + CatalogOrderByNewest CatalogOrderBy = "`door43_metadata`.release_date_unix DESC" + CatalogOrderByReleases CatalogOrderBy = "release_count ASC" + CatalogOrderByReleasesReverse CatalogOrderBy = "release_count DESC" + CatalogOrderByStars CatalogOrderBy = "`repository`.num_stars ASC" + CatalogOrderByStarsReverse CatalogOrderBy = "`repository`.num_stars DESC" + CatalogOrderByForks CatalogOrderBy = "`repository`.num_forks ASC" + CatalogOrderByForksReverse CatalogOrderBy = "`repository`.num_forks DESC" +) + +// SearchCatalogOptions holds the search options +type SearchCatalogOptions struct { + db.ListOptions + RepoID int64 + Keywords []string + Owners []string + Repos []string + Tags []string + Stage Stage + Subjects []string + Resources []string + ContentFormats []string + CheckingLevels []string + Books []string + IncludeHistory bool + MetadataTypes []string + MetadataVersions []string + ShowIngredients util.OptionalBool + Languages []string + LanguageIsGL util.OptionalBool + OrderBy []CatalogOrderBy + PartialMatch bool +} + +// GetMetadataCond Get the metadata condition +func GetMetadataCond(keyword string) builder.Cond { + cond := builder.NewCond() + cond = cond.And(builder.Like{"`door43_metadata`.title", keyword}) + cond = cond.Or(builder.Eq{"`door43_metadata`.resource": keyword}) + cond = cond.Or(builder.Like{"`door43_metadata`.subject", keyword}) + cond = cond.Or(builder.Eq{"`door43_metadata`.language": keyword}) + cond = cond.Or(builder.Like{"`door43_metadata`.language_title", keyword}) + return cond +} + +// SearchCatalogCondition creates a query condition according search repository options +func SearchCatalogCondition(opts *SearchCatalogOptions) builder.Cond { + var repoCond, ownerCond builder.Cond + if opts.RepoID > 0 { + repoCond = builder.Eq{"`repository`.ID": opts.RepoID} + } else { + repoCond = GetRepoCond(opts.Repos, opts.PartialMatch) + ownerCond = GetOwnerCond(opts.Owners, opts.PartialMatch) + } + + keywordCond := builder.NewCond() + for _, keyword := range opts.Keywords { + keywordCond = 
keywordCond.Or(builder.Like{"`repository`.lower_name", strings.TrimSpace(keyword)}) + keywordCond = keywordCond.Or(builder.Like{"`user`.lower_name", strings.TrimSpace(keyword)}) + keywordCond = keywordCond.Or(GetMetadataCond(keyword)) + } + + stageCond := GetStageCond(opts.Stage) + historyCond := GetHistoryCond(opts.IncludeHistory) + + langIsGLCond := builder.NewCond() + if opts.LanguageIsGL != util.OptionalBoolNone { + langIsGLCond = builder.Eq{"`door43_metadata`.language_is_gl": opts.LanguageIsGL.IsTrue()} + } + + cond := builder.NewCond().And( + GetSubjectCond(opts.Subjects, opts.PartialMatch), + GetResourceCond(opts.Resources), + GetContentFormatCond(opts.ContentFormats, opts.PartialMatch), + GetBookCond(opts.Books), + GetLanguageCond(opts.Languages, opts.PartialMatch), + GetCheckingLevelCond(opts.CheckingLevels), + GetMetadataTypeCond(opts.MetadataTypes, opts.PartialMatch), + GetTagCond(opts.Tags), + repoCond, + ownerCond, + stageCond, + historyCond, + langIsGLCond, + keywordCond, + builder.Eq{"`repository`.is_private": false}, + builder.Eq{"`repository`.is_archived": false}) + + if len(opts.MetadataTypes) > 0 { + cond.And(GetMetadataVersionCond(opts.MetadataVersions, opts.PartialMatch)) + } + + return cond +} + +// SplitAtCommaNotInString split s at commas, ignoring commas in strings. +func SplitAtCommaNotInString(s string, requireSpaceAfterComma bool) []string { + var res []string + var beg int + var inString bool + var prevIsComma bool + + for i := 0; i < len(s); i++ { + if requireSpaceAfterComma && s[i] == ',' && !inString { + prevIsComma = true + continue + } else if s[i] == ' ' { + if prevIsComma { + res = append(res, strings.TrimSpace(s[beg:i-1])) + beg = i + 1 + } else { + continue + } + } else if !requireSpaceAfterComma && s[i] == ',' && !inString { + res = append(res, strings.TrimSpace(s[beg:i])) + beg = i + 1 + } else if s[i] == '"' { + if !inString { + inString = true + } else if i > 0 && s[i-1] != '\\' { + inString = false + } + } + prevIsComma = false + } + return append(res, strings.TrimSpace(s[beg:])) +} + +// GetStageCond gets the condition for the given stage +func GetStageCond(stage Stage) builder.Cond { + return builder.Lte{"`door43_metadata`.stage": stage} +} + +// GetHistoryCond gets the conditions if IncludeHistory is false +func GetHistoryCond(includeHistory bool) builder.Cond { + if includeHistory { + return builder.Lte{"`door43_metadata`.stage": StageBranch} + } + return builder.Eq{"`door43_metadata`.is_latest_for_stage": true} +} + +// GetSubjectCond gets the subject condition +func GetSubjectCond(subjects []string, partialMatch bool) builder.Cond { + subjectCond := builder.NewCond() + for _, subject := range subjects { + for _, v := range strings.Split(subject, ",") { + if partialMatch { + subjectCond = subjectCond.Or(builder.Like{"`door43_metadata`.subject", strings.TrimSpace(v)}) + } else { + subjectCond = subjectCond.Or(builder.Eq{"`door43_metadata`.subject": strings.TrimSpace(v)}) + } + } + } + return subjectCond +} + +// GetResourceCond gets the metdata type condition +func GetResourceCond(resources []string) builder.Cond { + resourceCond := builder.NewCond() + for _, resource := range resources { + for _, v := range strings.Split(resource, ",") { + resourceCond = resourceCond.Or(builder.Eq{"`door43_metadata`.resource": strings.TrimSpace(v)}) + } + } + return resourceCond +} + +// GetContentFormatCond gets the metdata type condition +func GetContentFormatCond(formats []string, partialMatch bool) builder.Cond { + formatCond := builder.NewCond() + for _, 
format := range formats { + for _, v := range strings.Split(format, ",") { + if partialMatch { + formatCond = formatCond.Or(builder.Like{"`door43_metadata`.content_format", strings.TrimSpace(v)}) + } else { + formatCond = formatCond.Or(builder.Eq{"`door43_metadata`.content_format": strings.TrimSpace(v)}) + } + } + } + return formatCond +} + +// GetMetadataTypeCond gets the metdata type condition +func GetMetadataTypeCond(types []string, partialMatch bool) builder.Cond { + metadataTypeCond := builder.NewCond() + for _, metadataType := range types { + for _, v := range strings.Split(metadataType, ",") { + metadataTypeCond = metadataTypeCond.Or(builder.Eq{"`door43_metadata`.metadata_type": strings.ToLower(v)}) + } + } + return metadataTypeCond +} + +// GetMetadataVersionCond gets the metdata version condition +func GetMetadataVersionCond(versions []string, partialMatch bool) builder.Cond { + versionCond := builder.NewCond() + for _, version := range versions { + for _, v := range strings.Split(version, ",") { + if partialMatch { + versionCond = versionCond.Or(builder.Like{"`door43_metadata`.metadata_version", strings.TrimSpace(v)}) + } else { + versionCond = versionCond.Or(builder.Eq{"`door43_metadata`.metadata_version": strings.TrimSpace(v)}) + } + } + } + return versionCond +} + +// GetLanguageCond gets the language condition +func GetLanguageCond(languages []string, partialMatch bool) builder.Cond { + langCond := builder.NewCond() + for _, lang := range languages { + for _, v := range strings.Split(lang, ",") { + if partialMatch { + langCond = langCond. + Or(builder.Like{"`door43_metadata`.language", strings.TrimSpace(v)}). + Or(builder.Like{"CONCAT(SUBSTRING_INDEX(`repository`.lower_name, '_', 1), '_')", strings.TrimSpace(v) + "\\_"}) + } else { + langCond = langCond. + Or(builder.Eq{"`door43_metadata`.language": strings.TrimSpace(v)}). 
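+ // also match repos whose name starts with the language code followed by an underscore, e.g. a repo named "en_ult" matches language "en"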
+ Or(builder.Eq{"CONCAT(SUBSTRING_INDEX(`repository`.lower_name, '_', 1), '_')": strings.TrimSpace(v) + "_"}) + } + } + } + return langCond +} + +// GetBookCond gets the book condition +func GetBookCond(books []string) builder.Cond { + bookCond := builder.NewCond() + for _, book := range books { + for _, v := range strings.Split(book, ",") { + bookCond = bookCond.Or(builder.Expr("JSON_CONTAINS(LOWER(JSON_EXTRACT(`door43_metadata`.ingredients, '$')), JSON_OBJECT('identifier', ?))", strings.ToLower(v))) + } + } + return bookCond +} + +// GetCheckingLevelCond gets the checking level condition +func GetCheckingLevelCond(checkingLevels []string) builder.Cond { + checkingCond := builder.NewCond() + for _, checking := range checkingLevels { + for _, v := range strings.Split(checking, ",") { + checkingCond = checkingCond.Or(builder.Gte{"`door43_metadata`.checking_level": v}) + } + } + return checkingCond +} + +// GetTagCond gets the tag condition +func GetTagCond(tags []string) builder.Cond { + tagCond := builder.NewCond() + for _, tag := range tags { + for _, v := range strings.Split(tag, ",") { + tagCond = tagCond.Or(builder.Eq{"`release`.tag_name": v}) + } + } + return tagCond +} + +// GetRepoCond gets the repo condition +func GetRepoCond(repos []string, partialMatch bool) builder.Cond { + repoCond := builder.NewCond() + for _, repo := range repos { + for _, v := range strings.Split(repo, ",") { + if partialMatch { + repoCond = repoCond.Or(builder.Like{"`repository`.lower_name", strings.ToLower(v)}) + } else { + repoCond = repoCond.Or(builder.Eq{"`repository`.lower_name": strings.ToLower(v)}) + } + } + } + return repoCond +} + +// GetOwnerCond gets the owner condition +func GetOwnerCond(owners []string, partialMatch bool) builder.Cond { + ownerCond := builder.NewCond() + for _, owner := range owners { + for _, v := range strings.Split(owner, ",") { + if partialMatch { + ownerCond = ownerCond.Or(builder.Like{"`user`.lower_name", strings.ToLower(v)}) + } else { + ownerCond = ownerCond.Or(builder.Eq{"`user`.lower_name": strings.ToLower(v)}) + } + } + } + return ownerCond +} diff --git a/models/door43metadata/stage.go b/models/door43metadata/stage.go new file mode 100644 index 0000000000..a61eaaba2c --- /dev/null +++ b/models/door43metadata/stage.go @@ -0,0 +1,40 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package door43metadata + +/*** Stage ***/ + +// Stage type for choosing which level of stage to return in the Catalog results +type Stage int + +// Stage values +const ( + StageProd Stage = 1 + StagePreProd Stage = 2 + StageLatest Stage = 3 + StageBranch Stage = 4 +) + +// StageMap map from string to Stage (int) +var StageMap = map[string]Stage{ + "prod": StageProd, + "preprod": StagePreProd, + "latest": StageLatest, + "branch": StageBranch, +} + +// StageToStringMap map from stage (int) to string +var StageToStringMap = map[Stage]string{ + StageProd: "prod", + StagePreProd: "preprod", + StageLatest: "latest", + StageBranch: "branch", +} + +// String returns string repensation of a Stage (int) +func (s *Stage) String() string { + return StageToStringMap[*s] +} + +/*** END Stage ***/ diff --git a/models/fixtures/door43_metadata.yml b/models/fixtures/door43_metadata.yml new file mode 100644 index 0000000000..3fc9e456d4 --- /dev/null +++ b/models/fixtures/door43_metadata.yml @@ -0,0 +1 @@ +[] # empty \ No newline at end of file diff --git a/models/git/refs.go b/models/git/refs.go new file mode 100644 index 0000000000..f01529bb2d --- /dev/null +++ b/models/git/refs.go @@ -0,0 +1,72 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package git + +import ( + "context" + "strings" + + "code.gitea.io/gitea/modules/git" +) + +// CheckReferenceEditability checks if the reference can be modified by the user or any user +func CheckReferenceEditability(ctx context.Context, refName, commitID string, repoID, userID int64) error { + refParts := strings.Split(refName, "/") + + // Must have at least 3 parts, e.g. refs/heads/new-branch + if len(refParts) <= 2 { + return git.ErrInvalidRefName{ + RefName: refName, + Reason: "reference name must contain at least three slash-separted components", + } + } + + // Must start with 'refs/' + if refParts[0] != "refs/" { + return git.ErrInvalidRefName{ + RefName: refName, + Reason: "reference must start with 'refs/'", + } + } + + // 'refs/pull/*' is not allowed + if refParts[1] == "pull" { + return git.ErrInvalidRefName{ + RefName: refName, + Reason: "refs/pull/* is read-only", + } + } + + if refParts[1] == "tags" { + // If the 2nd part is "tags" then we need ot make sure the user is allowed to + // modify this tag (not protected or is admin) + if protectedTags, err := GetProtectedTags(ctx, repoID); err == nil { + isAllowed, err := IsUserAllowedToControlTag(ctx, protectedTags, refName, userID) + if err != nil { + return err + } + if !isAllowed { + return git.ErrProtectedRefName{ + RefName: refName, + Message: "you're not authorized to change a protected tag", + } + } + } + } else if refParts[1] == "heads" { + // If the 2nd part is "heas" then we need to make sure the user is allowed to + // modify this branch (not protected or is admin) + isProtected, err := IsBranchProtected(ctx, repoID, refName) + if err != nil { + return err + } + if !isProtected { + return git.ErrProtectedRefName{ + RefName: refName, + Message: "changes must be made through a pull request", + } + } + } + + return nil +} diff --git a/models/repo/attachment.go b/models/repo/attachment.go index 1a588398c1..69f02d5c4e 100644 --- a/models/repo/attachment.go +++ b/models/repo/attachment.go @@ -8,6 +8,7 @@ import ( "fmt" "net/url" "path" + "strings" // DCS Customizations "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/setting" @@ -30,12 +31,50 @@ type Attachment struct { Size int64 `xorm:"DEFAULT 0"` 
CreatedUnix timeutil.TimeStamp `xorm:"created"` CustomDownloadURL string `xorm:"-"` + /*** DCS Customizations ***/ + BrowserDownloadURL string `xorm:"-" json:"browser_download_url"` + /*** END DCS Customizations ***/ } func init() { db.RegisterModel(new(Attachment)) } +/*** DCS Customizations ***/ + +func (a *Attachment) AfterLoad() { + if strings.Contains(a.Name, "|http") || strings.Contains(a.Name, "|ftp") { + if name, url, ok := strings.Cut(a.Name, "|"); ok { + a.Name = name + a.BrowserDownloadURL = url + } + } +} + +func (a *Attachment) BeforeInsert() { + if a.Name == "" && a.BrowserDownloadURL != "" { + u, _ := url.Parse(a.BrowserDownloadURL) + a.Name = path.Base(u.Path) + } + if a.BrowserDownloadURL != "" { + a.Name = fmt.Sprintf("%s|%s", a.Name, a.BrowserDownloadURL) + } +} + +func (a *Attachment) BeforeUpdate() { + a.BeforeInsert() +} + +func (a *Attachment) AfterInsert() { + a.AfterLoad() +} + +func (a *Attachment) AfterUpdate() { + a.AfterLoad() +} + +/*** END DCS Customiations ***/ + // IncreaseDownloadCount is update download count + 1 func (a *Attachment) IncreaseDownloadCount(ctx context.Context) error { // Update download count. @@ -58,6 +97,12 @@ func (a *Attachment) RelativePath() string { // DownloadURL returns the download url of the attached file func (a *Attachment) DownloadURL() string { + /*** DCS Customizations ***/ + if a.BrowserDownloadURL != "" { + return a.BrowserDownloadURL + } + /*** END DCS Customizations ***/ + if a.CustomDownloadURL != "" { return a.CustomDownloadURL } diff --git a/models/repo/catalog.go b/models/repo/catalog.go new file mode 100644 index 0000000000..1338c01f3e --- /dev/null +++ b/models/repo/catalog.go @@ -0,0 +1,20 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package repo + +import ( + "net/url" + + "code.gitea.io/gitea/modules/setting" +) + +// CatalogSearchURL returns the repository catalog search API URL +func (repo *Repository) CatalogSearchURL() string { + return setting.AppURL + "api/v1/catalog/search/" + url.PathEscape(repo.OwnerName) + "/" + url.PathEscape(repo.Name) +} + +// CatalogEntryURL returns the repository catalog entry API URL +func (repo *Repository) CatalogEntryURL() string { + return setting.AppURL + "api/v1/catalog/entry/" + url.PathEscape(repo.OwnerName) + "/" + url.PathEscape(repo.Name) +} diff --git a/models/repo/door43metadata.go b/models/repo/door43metadata.go new file mode 100644 index 0000000000..1c5efdf80b --- /dev/null +++ b/models/repo/door43metadata.go @@ -0,0 +1,592 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package repo + +import ( + "context" + "fmt" + "sort" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/door43metadata" + "code.gitea.io/gitea/models/system" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/builder" +) + +/*** INIT DB ***/ + +// InitDoor43Metadata does some db management +func InitDoor43Metadata() error { + switch setting.Database.Type { + case "mysql": + _, err := db.GetEngine(db.DefaultContext).Exec("ALTER TABLE `door43_metadata` MODIFY `metadata` JSON") + if err != nil { + return fmt.Errorf("Error changing door43_metadata metadata column type: %v", err) + } + } + return nil +} + +/*** END INIT DB ***/ + +/*** START Door43Metadata struct and getters ***/ + +// Door43Metadata represents the metadata of repository's release or default branch (ReleaseID = 0). +type Door43Metadata struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX UNIQUE(repo_ref) NOT NULL"` + Repo *Repository `xorm:"-"` + ReleaseID int64 `xorm:"NOT NULL"` + Release *Release `xorm:"-"` + Ref string `xorm:"INDEX UNIQUE(repo_ref) NOT NULL"` + RefType string `xorm:"NOT NULL"` + CommitSHA string `xorm:"NOT NULL VARCHAR(40)"` + Stage door43metadata.Stage `xorm:"INDEX NOT NULL"` + MetadataType string `xorm:"INDEX NOT NULL"` + MetadataVersion string `xorm:"NOT NULL"` + Resource string `xorm:"NOT NULL"` + Subject string `xorm:"INDEX NOT NULL"` + Title string `xorm:"NOT NULL"` + Language string `xorm:"INDEX NOT NULL"` + LanguageTitle string `xorm:"NOT NULL"` + LanguageDirection string `xorm:"NOT NULL"` + LanguageIsGL bool `xorm:"NOT NULL"` + ContentFormat string `xorm:"NOT NULL"` + CheckingLevel int `xorm:"NOT NULL"` + Ingredients []*structs.Ingredient `xorm:"JSON"` + Metadata *map[string]interface{} `xorm:"JSON"` + ReleaseDateUnix timeutil.TimeStamp `xorm:"NOT NULL"` + IsLatestForStage bool `xorm:"INDEX"` + IsRepoMetadata bool `xorm:"INDEX"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created NOT NULL"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` +} + +func init() { + db.RegisterModel(new(Door43Metadata)) +} + +// LoadRepo gets the repo associated with the door43 metadata entry +func (dm *Door43Metadata) LoadRepo(ctx context.Context) error { + if dm.Repo == nil { + repo, err := GetRepositoryByID(ctx, dm.RepoID) + if err != nil { + return err + } + dm.Repo = repo + if err := dm.Repo.LoadOwner(ctx); err != nil { + return err + } + } + return nil +} + +// GetRelease gets the associated release of a door43 metadata entry +func (dm *Door43Metadata) LoadRelease(ctx context.Context) error { + if dm.ReleaseID > 0 && dm.Release == nil { + rel, err := GetReleaseByID(ctx, dm.ReleaseID) + if err != nil { + return err + } + dm.Release = rel + } + if dm.Release != nil { + dm.Release.Door43Metadata = dm + dm.Release.Repo = dm.Repo + if err := dm.Release.LoadAttributes(ctx); err != nil { + log.Warn("LoadRelease - calling dm.Release.loadAttributes Error: %v\n", err) + return err + } + } + return nil +} + +// LoadAttributes load repo and release attributes for a door43 metadata +func (dm *Door43Metadata) LoadAttributes(ctx context.Context) error { + if err := dm.LoadRepo(ctx); err != nil { + return err + } + if dm.ReleaseID > 0 { + if err := dm.LoadRelease(ctx); err != nil { + log.Error("LoadRelease: %v", err) + return nil + } + } + return nil +} + +// APIURL the api url for a door43 
metadata. door43 metadata must have attributes loaded +func (dm *Door43Metadata) APIURL() string { + return fmt.Sprintf("%sapi/v1/catalog/entry/%s/%s/", setting.AppURL, dm.Repo.FullName(), dm.Ref) +} + +// GetTarballURL get the tarball URL of the tag or branch +func (dm *Door43Metadata) GetTarballURL() string { + if dm.RefType == "branch" { + return fmt.Sprintf("%s/archive/%s.tar.gz", dm.Repo.HTMLURL(), dm.CommitSHA[0:10]) + } + return fmt.Sprintf("%s/archive/%s.tar.gz", dm.Repo.HTMLURL(), dm.Ref) +} + +// GetZipballURL get the zipball URL of the tag or branch +func (dm *Door43Metadata) GetZipballURL() string { + if dm.RefType == "branch" { + return fmt.Sprintf("%s/archive/%s.zip", dm.Repo.HTMLURL(), dm.CommitSHA[0:10]) + } + return fmt.Sprintf("%s/archive/%s.zip", dm.Repo.HTMLURL(), dm.Ref) +} + +// GetReleaseURL get the URL the release API +func (dm *Door43Metadata) GetReleaseURL(ctx context.Context) string { + if dm.ReleaseID > 0 { + if dm.Release != nil { + return dm.Release.APIURL() + } + if err := dm.LoadRepo(ctx); err == nil { + return fmt.Sprintf("%sapi/v1/repos/%s/releases/%d", setting.AppURL, dm.Repo.FullName(), dm.ReleaseID) + } + } + return "" +} + +// GetMetadataURL gets the url to the raw manifest.yaml file +func (dm *Door43Metadata) GetMetadataURL() string { + // Use CommitID because of race condition to a branch + if dm.MetadataType == "rc" { + return fmt.Sprintf("%s/raw/commit/%s/manifest.yaml", dm.Repo.HTMLURL(), dm.CommitSHA) + } + // so far this means we have a ts or tc metadata entry, but need to change for scripture burrito! + return fmt.Sprintf("%s/raw/commit/%s/manifest.json", dm.Repo.HTMLURL(), dm.CommitSHA) +} + +// GetMetadataTypeTitle returns the metadata type title +func (dm *Door43Metadata) GetMetadataTypeTitle() string { + switch dm.MetadataType { + case "ts": + return "translationStudio" + case "tc": + return "translationCore" + case "rc": + return "Resource Container" + case "sb": + return "Scripture Burrito" + default: + return dm.MetadataType + } +} + +// GetMetadataTypeIcon returns the metadata type icon +func (dm *Door43Metadata) GetMetadataTypeIcon() string { + switch dm.MetadataType { + case "rc": + return "rc.png" + case "ts": + return "ts.png" + case "tc": + return "tc.png" + case "sb": + return "sb.png" + default: + return "uw.png" + } +} + +// GetMetadataJSONString returns the JSON in string format of a map +func (dm *Door43Metadata) GetMetadataJSONString() string { + json, _ := json.MarshalIndent(dm.Metadata, "", " ") + return string(json) +} + +// GetMetadataJSONURL gets the json representation of the contents of the manifest.yaml file +func (dm *Door43Metadata) GetMetadataJSONURL() string { + return fmt.Sprintf("%smetadata/", dm.APIURL()) +} + +// GetMetadataAPIContentsURL gets the metadata API contents URL of the manifest.yaml file +func (dm *Door43Metadata) GetMetadataAPIContentsURL() string { + return fmt.Sprintf("%s/contents/manifest.yaml?ref=%s", dm.Repo.APIURL(), dm.Ref) +} + +// StageStr gets the string representation of a stage int +func (dm *Door43Metadata) StageStr() string { + return door43metadata.StageToStringMap[dm.Stage] +} + +// GetGitTreesURL gets the git trees URL for a repo and branch or tag for all files +func (dm *Door43Metadata) GetGitTreesURL() string { + if dm.RefType == "branch" { + return fmt.Sprintf("%s/git/trees/%s?recursive=1&per_page=99999", dm.Repo.APIURL(), dm.CommitSHA[0:10]) + } + return fmt.Sprintf("%s/git/trees/%s?recursive=1&per_page=99999", dm.Repo.APIURL(), dm.Ref) +} + +// GetContentsURL gets the contents 
URL for a repo and branch or tag for all files +func (dm *Door43Metadata) GetContentsURL() string { + if dm.RefType == "branch" { + return fmt.Sprintf("%s/contents?ref=%s", dm.Repo.APIURL(), dm.CommitSHA[0:10]) + } + return fmt.Sprintf("%s/contents?ref=%s", dm.Repo.APIURL(), dm.Ref) +} + +// GetIngredientsIdentifierList get the identifiers of the igredients and returns them as a list of strings +func (dm *Door43Metadata) GetIngredientsIdentifierList() []string { + var ids []string + if len(dm.Ingredients) > 0 { + for _, ing := range dm.Ingredients { + ids = append(ids, ing.Identifier) + } + } + return ids +} + +// GetIngredientsAsString get the integredients of the repo and returns the identifiers as a comma-delimited string +func (dm *Door43Metadata) GetIngredientsAsString() string { + ids := dm.GetIngredientsIdentifierList() + return strings.Join(ids, ", ") +} + +func (dm *Door43Metadata) GetAlignmentCounts() map[string]int { + counts := map[string]int{} + if len(dm.Ingredients) > 0 { + for _, ing := range dm.Ingredients { + if ing.AlignmentCount != nil { + counts[ing.Identifier] = *ing.AlignmentCount + } + } + } + return counts +} + +// GetReleaseCount returns the count of releases of repository of the Door43Metadata's stage +func (dm *Door43Metadata) GetReleaseCount() (int64, error) { + stageCond := door43metadata.GetStageCond(dm.Stage) + return db.GetEngine(db.DefaultContext).Join("LEFT", "release", "`release`.id = `door43_metadata`.release_id"). + Where(builder.And(builder.Eq{"`door43_metadata`.repo_id": dm.RepoID}, stageCond)). + Count(&Door43Metadata{}) +} + +// GetMetadataFilename returns the name of the metadata file, e.g. manifest.yaml or metadata.json +func (dm *Door43Metadata) GetMetadataFilename() string { + if dm.MetadataType == "rc" { + return "manifest.yaml" + } else if dm.MetadataType == "sb" { + return "metadata.json" + } else if dm.MetadataType == "tc" || dm.MetadataType == "ts" { + return "manifest.json" + } + return "" +} + +// IsDoor43MetadataExist returns true if door43 metadata with given release ID already exists. 
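+// The xorm Get call below builds its query from the non-zero fields of the bean, so a ReleaseID of 0 (default branch) only constrains the repo ID.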
+func IsDoor43MetadataExist(ctx context.Context, repoID, releaseID int64) (bool, error) { + return db.GetEngine(ctx).Get(&Door43Metadata{RepoID: repoID, ReleaseID: releaseID}) +} + +// InsertDoor43Metadata inserts a door43 metadata +func InsertDoor43Metadata(ctx context.Context, dm *Door43Metadata) error { + if id, err := db.GetEngine(ctx).Insert(dm); err != nil { + return err + } else if id > 0 { + dm.ID = id + if err := dm.LoadRepo(ctx); err != nil { + return err + } + if dm.ReleaseID > 0 { + if err := system.CreateRepositoryNotice("Door43 Metadata created for repo: %s, tag: %s", dm.Repo.Name, dm.Ref); err != nil { + return err + } + } else { + if err := system.CreateRepositoryNotice("Door43 Metadata created for repo: %s, branch: %s", dm.Repo.Name, dm.Ref); err != nil { + return err + } + } + } + return nil +} + +// InsertDoor43Metadatas inserts door43 metadatas +func InsertDoor43Metadatas(ctx context.Context, dms []*Door43Metadata) error { + _, err := db.GetEngine(ctx).Insert(dms) + return err +} + +// UpdateDoor43MetadataCols update door43 metadata according special columns +func UpdateDoor43MetadataCols(ctx context.Context, dm *Door43Metadata, cols ...string) error { + id, err := db.GetEngine(ctx).ID(dm.ID).Cols(cols...).Update(dm) + if id > 0 && dm.ReleaseID > 0 { + err := dm.LoadRepo(ctx) + if err != nil { + return err + } + if err := system.CreateRepositoryNotice("Door43 Metadata updated for repo: %s, tag: %s", dm.Repo.Name, dm.Ref); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + } + return err +} + +// UpdateDoor43Metadata update a;ll door43 metadata +func UpdateDoor43Metadata(ctx context.Context, dm *Door43Metadata) error { + id, err := db.GetEngine(ctx).ID(dm.ID).AllCols().Update(dm) + if id > 0 && dm.ReleaseID > 0 { + err := dm.LoadRepo(ctx) + if err != nil { + return err + } + if err := system.CreateRepositoryNotice("Door43 Metadata updated for repo: %s, tag: %s", dm.Repo.Name, dm.Ref); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + } + return err +} + +// GetDoor43MetadataByID returns door43 metadata with given ID. +func GetDoor43MetadataByID(ctx context.Context, id, repoID int64) (*Door43Metadata, error) { + dm := new(Door43Metadata) + has, err := db.GetEngine(ctx). + ID(id). + Get(dm) + if err != nil { + return nil, err + } else if !has { + return nil, ErrDoor43MetadataNotExist{id, repoID, ""} + } + return dm, nil +} + +// GetMostRecentDoor43MetadataByStage returns the most recent Door43Metadatas of a given stage for a repo +func GetMostRecentDoor43MetadataByStage(ctx context.Context, repoID int64, stage door43metadata.Stage) (*Door43Metadata, error) { + dm := &Door43Metadata{RepoID: repoID, Stage: stage} + has, err := db.GetEngine(ctx).Desc("release_date_unix").Get(dm) + if err != nil { + return nil, err + } else if !has { + return nil, ErrDoor43MetadataNotExist{0, repoID, ""} + } + return dm, nil +} + +// GetDoor43MetdataLatestInStage(ctx context.Context, repoID) + +// GetDoor43MetadataByRepoIDAndReleaseID returns the metadata of a given release ID (0 = default branch). 
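+// GetDoor43MetadataByRepoIDAndRef returns the metadata entry for the given repo ID and ref (branch or tag name); it returns ErrDoor43MetadataNotExist when no entry is found.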
+func GetDoor43MetadataByRepoIDAndRef(ctx context.Context, repoID int64, ref string) (*Door43Metadata, error) { + dm := &Door43Metadata{ + RepoID: repoID, + Ref: ref, + } + has, err := db.GetEngine(ctx).Get(dm) + if err != nil { + return nil, err + } else if !has { + return nil, ErrDoor43MetadataNotExist{0, repoID, ref} + } + return dm, nil +} + +// GetDoor43MetadataMapValues gets the values of a Door43Metadata map +func GetDoor43MetadataMapValues(m map[int64]*Door43Metadata) []*Door43Metadata { + values := make([]*Door43Metadata, 0, len(m)) + for _, v := range m { + values = append(values, v) + } + return values +} + +/*** END Door43Metadata struct and getters ***/ + +/*** START Door43MetadataList ***/ + +// Door43MetadataList contains a list of repositories +type Door43MetadataList []*Door43Metadata + +func (dms Door43MetadataList) Len() int { + return len(dms) +} + +func (dms Door43MetadataList) Less(i, j int) bool { + return dms[i].Repo.FullName() < dms[j].Repo.FullName() +} + +func (dms Door43MetadataList) Swap(i, j int) { + dms[i], dms[j] = dms[j], dms[i] +} + +// Door43MetadataListOfMap make list from values of map +func Door43MetadataListOfMap(dmMap map[int64]*Door43Metadata) Door43MetadataList { + return Door43MetadataList(GetDoor43MetadataMapValues(dmMap)) +} + +// LoadAttributes loads the attributes for the given Door43MetadataList +func (dms Door43MetadataList) LoadAttributes(ctx context.Context) error { + if len(dms) == 0 { + return nil + } + var lastErr error + for _, dm := range dms { + if err := dm.LoadAttributes(ctx); err != nil && lastErr == nil { + lastErr = err + } + } + return lastErr +} + +/*** END Door43MEtadataList ***/ + +/*** Door43MetadataSorter ***/ +type Door43MetadataSorter struct { + dms []*Door43Metadata +} + +func (dms *Door43MetadataSorter) Len() int { + return len(dms.dms) +} + +func (dms *Door43MetadataSorter) Less(i, j int) bool { + return dms.dms[i].UpdatedUnix > dms.dms[j].UpdatedUnix +} + +func (dms *Door43MetadataSorter) Swap(i, j int) { + dms.dms[i], dms.dms[j] = dms.dms[j], dms.dms[i] +} + +// SortDoorMetadatas sorts door43 metadatas by number of commits and created time. +func SortDoorMetadatas(dms []*Door43Metadata) { + sorter := &Door43MetadataSorter{dms: dms} + sort.Sort(sorter) +} + +// DeleteDoor43MetadataByID deletes a metadata from database by given ID. +func DeleteDoor43MetadataByID(ctx context.Context, id, repoID int64) error { + dm, err := GetDoor43MetadataByID(ctx, id, repoID) + if err != nil || dm.RepoID != repoID { + return err + } + return DeleteDoor43Metadata(ctx, dm) +} + +// DeleteDoor43Metadata deletes a metadata from database by given ID. +func DeleteDoor43Metadata(ctx context.Context, dm *Door43Metadata) error { + id, err := db.GetEngine(ctx).Delete(dm) + if id > 0 && dm.ReleaseID > 0 { + if err := dm.LoadRepo(ctx); err != nil { + return err + } else if err := system.CreateRepositoryNotice("Door43 Metadata deleted for repo: %s, tag: %s", dm.Repo.Name, dm.Ref); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + } + return err +} + +// DeleteDoor43MetadataByRepoRef deletes a metadata from database by given repo and ref. 
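+// A missing entry is not treated as an error; the function simply returns nil when no metadata exists for the given ref.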
+func DeleteDoor43MetadataByRepoRef(ctx context.Context, repo *Repository, ref string) error { + dm, err := GetDoor43MetadataByRepoIDAndRef(ctx, repo.ID, ref) + if err != nil { + if !IsErrDoor43MetadataNotExist(err) { + return err + } + return nil + } + _, err = db.GetEngine(db.DefaultContext).ID(dm.ID).Delete(dm) + return err +} + +// DeleteAllDoor43MetadatasByRepoID deletes all metadatas from database for a repo by given repo ID. +func DeleteAllDoor43MetadatasByRepoID(ctx context.Context, repoID int64) (int64, error) { + return db.GetEngine(ctx).Delete(Door43Metadata{RepoID: repoID}) +} + +// GetReposForMetadata gets all the repos to process for metadata +func GetReposForMetadata(ctx context.Context) ([]*Repository, error) { + var repos []*Repository + err := db.GetEngine(ctx). + Join("INNER", "user", "`user`.id = `repository`.owner_id"). + Where(builder.Eq{"`repository`.is_archived": 0}.And(builder.Eq{"`repository`.is_private": 0})). + OrderBy("CASE WHEN `user`.lower_name = 'unfoldingword' THEN 0 " + + "WHEN `user`.lower_name = 'door43-catalog' THEN 1 " + + "WHEN `user`.lower_name LIKE '%_gl' THEN 2 " + + "ELSE 3 END"). + OrderBy("`user`.type DESC"). + OrderBy("`user`.lower_name"). + OrderBy("`repository`.lower_name"). + Find(&repos) + return repos, err +} + +// GetRepoReleaseTagsForMetadata gets the releases tags for a repo used for getting metadata +func GetRepoReleaseTagsForMetadata(ctx context.Context, repoID int64) ([]string, error) { + var releases []*Release + err := db.GetEngine(ctx). + Join("INNER", "repository", "`repository`.id = `release`.repo_id"). + Where(builder.Eq{"`release`.is_tag": 0}.And(builder.Eq{"`repository`.id": repoID})). + OrderBy("`release`.created_unix"). + Find(&releases) + if err != nil { + return nil, err + } + + tags := make([]string, len(releases)) + for idx, release := range releases { + tags[idx] = release.TagName + } + + return tags, nil +} + +/*** Error Structs & Functions ***/ + +// ErrDoor43MetadataAlreadyExist represents a "Door43MetadataAlreadyExist" kind of error. +type ErrDoor43MetadataAlreadyExist struct { + ReleaseID int64 +} + +// IsErrDoor43MetadataAlreadyExist checks if an error is a ErrDoor43MetadataAlreadyExist. +func IsErrDoor43MetadataAlreadyExist(err error) bool { + _, ok := err.(ErrDoor43MetadataAlreadyExist) + return ok +} + +func (err ErrDoor43MetadataAlreadyExist) Error() string { + return fmt.Sprintf("Metadata for release already exists [release: %d]", err.ReleaseID) +} + +// ErrDoor43MetadataNotExist represents a "Door43MetadataNotExist" kind of error. +type ErrDoor43MetadataNotExist struct { + ID int64 + RepoID int64 + Ref string +} + +// IsErrDoor43MetadataNotExist checks if an error is a ErrDoor43MetadataNotExist. +func IsErrDoor43MetadataNotExist(err error) bool { + _, ok := err.(ErrDoor43MetadataNotExist) + return ok +} + +func (err ErrDoor43MetadataNotExist) Error() string { + return fmt.Sprintf("door43 metadata does not exist [id: %d, repo_id: %d, ref: %s]", err.ID, err.RepoID, err.Ref) +} + +// ErrInvalidRelease represents a "InvalidRelease" kind of error. +type ErrInvalidRelease struct { + ReleaseID int64 +} + +// IsErrInvalidRelease checks if an error is a ErrInvalidRelease. 
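+// Note that this is a plain type assertion, so it will not match an ErrInvalidRelease that has been wrapped with fmt.Errorf("...: %w", err).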
+func IsErrInvalidRelease(err error) bool { + _, ok := err.(ErrInvalidRelease) + return ok +} + +func (err ErrInvalidRelease) Error() string { + return fmt.Sprintf("metadata release id is not valid [release_id: %d]", err.ReleaseID) +} + +/*** END Error Structs & Functions ***/ diff --git a/models/repo/release.go b/models/repo/release.go index ff31ec4510..fcb39ad847 100644 --- a/models/repo/release.go +++ b/models/repo/release.go @@ -85,6 +85,7 @@ type Release struct { IsTag bool `xorm:"NOT NULL DEFAULT false"` // will be true only if the record is a tag and has no related releases Attachments []*Attachment `xorm:"-"` CreatedUnix timeutil.TimeStamp `xorm:"INDEX"` + Door43Metadata *Door43Metadata `xorm:"-"` } func init() { @@ -100,6 +101,14 @@ func (r *Release) LoadAttributes(ctx context.Context) error { return err } } + /*** DCS Customizations ***/ + if r.Door43Metadata == nil { + r.Door43Metadata, err = GetDoor43MetadataByRepoIDAndRef(ctx, r.RepoID, r.TagName) + if err != nil && !IsErrDoor43MetadataNotExist(err) { + return err + } + } + /*** END DCS Customizations ***/ if r.Publisher == nil { r.Publisher, err = user_model.GetUserByID(ctx, r.PublisherID) if err != nil { diff --git a/models/repo/release_dcs.go b/models/repo/release_dcs.go new file mode 100644 index 0000000000..27dc579117 --- /dev/null +++ b/models/repo/release_dcs.go @@ -0,0 +1,10 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package repo + +import "regexp" + +func (r *Release) IsCatalogVersion() bool { + return regexp.MustCompile(`^v\d`).Match([]byte(r.TagName)) || regexp.MustCompile(`^\d\d\d\d`).Match([]byte(r.TagName)) +} diff --git a/models/repo/repo.go b/models/repo/repo.go index c4b215e074..c7d8c63c70 100644 --- a/models/repo/repo.go +++ b/models/repo/repo.go @@ -188,6 +188,12 @@ type Repository struct { CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` ArchivedUnix timeutil.TimeStamp `xorm:"DEFAULT 0"` + /*** DCS Customizations ***/ + LatestProdDM *Door43Metadata `xorm:"-"` + LatestPreprodDM *Door43Metadata `xorm:"-"` + DefaultBranchDM *Door43Metadata `xorm:"-"` + RepoDM *Door43Metadata `xorm:"-"` + /*** DCS Customizations ***/ } func init() { @@ -297,6 +303,13 @@ func (repo *Repository) LoadAttributes(ctx context.Context) error { break } } + + /*** DCS Customizations ***/ + if err := repo.LoadLatestDMs(ctx); err != nil { + return err + } + /*** END DCS Customizations ***/ + return nil } diff --git a/models/repo/repo_dcs.go b/models/repo/repo_dcs.go new file mode 100644 index 0000000000..131c59eda9 --- /dev/null +++ b/models/repo/repo_dcs.go @@ -0,0 +1,101 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package repo + +import ( + "context" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/door43metadata" + "code.gitea.io/gitea/modules/dcs" + + "xorm.io/builder" +) + +// LoadLatestDMs loads the latest DMs +func (repo *Repository) LoadLatestDMs(ctx context.Context) error { + if repo.LatestProdDM == nil { + dm := &Door43Metadata{RepoID: repo.ID, Stage: door43metadata.StageProd, IsLatestForStage: true} + has, err := db.GetEngine(ctx).Desc("release_date_unix").Get(dm) + if err != nil { + return err + } + if has { + repo.LatestProdDM = dm + } + } + + if repo.LatestPreprodDM == nil { + dm := &Door43Metadata{RepoID: repo.ID, Stage: door43metadata.StagePreProd, IsLatestForStage: true} + has, err := db.GetEngine(ctx).Desc("release_date_unix").Get(dm) + if err != nil { + return err + } + if has { + repo.LatestPreprodDM = dm + } + } + + if repo.DefaultBranchDM == nil { + dm := &Door43Metadata{RepoID: repo.ID, Stage: door43metadata.StageLatest, IsLatestForStage: true} + has, err := db.GetEngine(ctx).Desc("release_date_unix").Get(dm) + if err != nil { + return err + } + if has { + repo.DefaultBranchDM = dm + } + } + + if repo.RepoDM == nil { + dm := &Door43Metadata{} + has, err := db.GetEngine(ctx). + Where(builder.Eq{"repo_id": repo.ID}). + And(builder.Eq{"is_repo_metadata": true}). + Desc("release_date_unix"). + Get(dm) + if err != nil { + return err + } + if has && dm != nil { + repo.RepoDM = dm + } else { + title := repo.Name + metadataType := dcs.GetMetadataTypeFromRepoName(repo.Name) + metadataVersion := dcs.GetDefaultMetadataVersionForType(metadataType) + subject := dcs.GetSubjectFromRepoName(repo.Name) + lang := dcs.GetLanguageFromRepoName(repo.Name) + langDir := dcs.GetLanguageDirection(lang) + langTitle := dcs.GetLanguageTitle(lang) + langIsGL := dcs.LanguageIsGL(lang) + repo.RepoDM = &Door43Metadata{ + RepoID: repo.ID, + MetadataType: metadataType, + MetadataVersion: metadataVersion, + Title: title, + Subject: subject, + Language: lang, + LanguageDirection: langDir, + LanguageTitle: langTitle, + LanguageIsGL: langIsGL, + } + } + } + + return nil +} + +// LoadLatestDMs loads the latest Door43Metadatas for the given RepositoryList +func (rl RepositoryList) LoadLatestDMs(ctx context.Context) error { + if rl.Len() == 0 { + return nil + } + var lastErr error + for _, repo := range rl { + if err := repo.LoadLatestDMs(ctx); err != nil && lastErr == nil { + lastErr = err + } + } + return lastErr +} diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go index 1668c23c77..1dd619ca2c 100644 --- a/models/repo/repo_list.go +++ b/models/repo/repo_list.go @@ -9,6 +9,7 @@ import ( "strings" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/door43metadata" "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -161,7 +162,18 @@ type SearchRepoOptions struct { // False -> include just has no milestone HasMilestones util.OptionalBool // LowerNames represents valid lower names to restrict to - LowerNames []string + LowerNames []string + Owners []string // DCS Customizations + Repos []string // DCS Customizations + Subjects []string // DCS Customizations + Resources []string // DCS Customizations + ContentFormats []string // DCS Customization + Books []string // DCS Customizations + Languages []string // DCS Customizations + LanguageIsGL util.OptionalBool // DCS Customizations + // query metadata type and version + MetadataTypes []string // DCS Customizations + MetadataVersions 
[]string // DCS Customizations // When specified true, apply some filters over the conditions: // - Don't show forks, when opts.Fork is OptionalBoolNone. // - Do not display repositories that don't have a description, an icon and topics. @@ -369,12 +381,12 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { // Restrict to starred repositories if opts.StarredByID > 0 { - cond = cond.And(builder.In("id", builder.Select("repo_id").From("star").Where(builder.Eq{"uid": opts.StarredByID}))) + cond = cond.And(builder.In("`repository`.id", builder.Select("repo_id").From("star").Where(builder.Eq{"uid": opts.StarredByID}))) } // Restrict to watched repositories if opts.WatchedByID > 0 { - cond = cond.And(builder.In("id", builder.Select("repo_id").From("watch").Where(builder.Eq{"user_id": opts.WatchedByID}))) + cond = cond.And(builder.In("`repository`.id", builder.Select("repo_id").From("watch").Where(builder.Eq{"user_id": opts.WatchedByID}))) } // Restrict repositories to those the OwnerID owns or contributes to as per opts.Collaborate @@ -442,23 +454,26 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { Where(subQueryCond). GroupBy("repo_topic.repo_id") - keywordCond := builder.In("id", subQuery) + keywordCond := builder.In("`repository`.id", subQuery) // DCS Customizations - adds `repository`. if !opts.TopicOnly { likes := builder.NewCond() for _, v := range strings.Split(opts.Keyword, ",") { - likes = likes.Or(builder.Like{"lower_name", strings.ToLower(v)}) + likes = likes.Or(builder.Like{"`repository`.lower_name", strings.ToLower(v)}) // DCS Customizations - adds `repository`. // If the string looks like "org/repo", match against that pattern too if opts.TeamID == 0 && strings.Count(opts.Keyword, "/") == 1 { pieces := strings.Split(opts.Keyword, "/") ownerName := pieces[0] repoName := pieces[1] - likes = likes.Or(builder.And(builder.Like{"owner_name", strings.ToLower(ownerName)}, builder.Like{"lower_name", strings.ToLower(repoName)})) + likes = likes.Or(builder.And(builder.Like{"owner_name", strings.ToLower(ownerName)}, builder.Like{"`repository`.lower_name", strings.ToLower(repoName)})) // DCS Customizations - adds `repository`. } if opts.IncludeDescription { - likes = likes.Or(builder.Like{"LOWER(description)", strings.ToLower(v)}) + likes = likes.Or(builder.Like{"LOWER(`repository`.description)", strings.ToLower(v)}) // DCS Customizations - adds `repository`. } + /*** DCS Customizations ***/ + likes = likes.Or(door43metadata.GetMetadataCond(v)) + /*** END DCS Customizations ***/ } keywordCond = keywordCond.Or(likes) } @@ -466,10 +481,10 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { } if opts.Language != "" { - cond = cond.And(builder.In("id", builder. - Select("repo_id"). - From("language_stat"). - Where(builder.Eq{"language": opts.Language}).And(builder.Eq{"is_primary": true}))) + cond = cond.And(builder.In("`repository`.id", builder. // DCS Customizations - Adds `repository`. + Select("repo_id"). + From("language_stat"). 
+ Where(builder.Eq{"language": opts.Language}).And(builder.Eq{"is_primary": true}))) } if opts.Fork != util.OptionalBoolNone || opts.OnlyShowRelevant { @@ -499,6 +514,25 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { cond = cond.And(builder.Eq{"num_milestones": 0}.Or(builder.IsNull{"num_milestones"})) } + /*** DCS Customizations ***/ + cond = cond.And(door43metadata.GetRepoCond(opts.Repos, false), + door43metadata.GetOwnerCond(opts.Owners, false), + door43metadata.GetSubjectCond(opts.Subjects, false), + door43metadata.GetResourceCond(opts.Resources), + door43metadata.GetContentFormatCond(opts.ContentFormats, false), + door43metadata.GetBookCond(opts.Books), + door43metadata.GetLanguageCond(opts.Languages, false), + door43metadata.GetMetadataTypeCond(opts.MetadataTypes, false)) + + if len(opts.MetadataTypes) > 0 { + cond.And(door43metadata.GetMetadataVersionCond(opts.MetadataVersions, false)) + } + + if opts.LanguageIsGL != util.OptionalBoolNone { + cond = cond.And(builder.Eq{"`door43_metadata`.is_gl`": opts.LanguageIsGL.IsTrue()}) + } + /*** EMD DCS Customizations ***/ + if opts.OnlyShowRelevant { // Only show a repo that has at least a topic, an icon, or a description subQueryCond := builder.NewCond() @@ -575,6 +609,10 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c opts.OrderBy = db.SearchOrderByAlphabetically } + /*** DCS Customizations - Since we join with more tables we need to prefix the OrderBy with `repository` ***/ + opts.OrderBy = "`repository`." + opts.OrderBy + /*** END DCS Customizaitons ***/ + args := make([]any, 0) if opts.PriorityOwnerID > 0 { opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", opts.OrderBy)) @@ -592,6 +630,8 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c if opts.PageSize > 0 { var err error count, err = sess. + Join("INNER", "user", "`user`.id = `repository`.owner_id"). // DCS Customizaitons - for owner search + Join("LEFT", "door43_metadata", builder.Expr("`door43_metadata`.repo_id = `repository`.id AND `door43_metadata`.is_repo_metadata = ?", true)). // DCS Customizations Where(cond). Count(new(Repository)) if err != nil { @@ -599,7 +639,9 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c } } - sess = sess.Where(cond).OrderBy(opts.OrderBy.String(), args...) + sess = sess.Where(cond).OrderBy(opts.OrderBy.String(), args...). // DCS Customizations - Adds . + Join("INNER", "user", "`user`.id = `repository`.owner_id"). 
// DCS Customizaitons - for owner search + Join("LEFT", "door43_metadata", builder.Expr("`door43_metadata`.repo_id = `repository`.id AND `door43_metadata`.is_repo_metadata = ?", true)) // DCS Customizations if opts.PageSize > 0 { sess = sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) } @@ -688,7 +730,7 @@ func SearchRepositoryIDs(ctx context.Context, opts *SearchRepoOptions) ([]int64, } ids := make([]int64, 0, defaultSize) - err = sess.Select("id").Table("repository").Find(&ids) + err = sess.Select("`repository`.id").Table("repository").Find(&ids) // DCS Customizations if opts.PageSize <= 0 { count = int64(len(ids)) } @@ -731,7 +773,7 @@ func GetUserRepositories(ctx context.Context, opts *SearchRepoOptions) (Reposito } if opts.LowerNames != nil && len(opts.LowerNames) > 0 { - cond = cond.And(builder.In("lower_name", opts.LowerNames)) + cond = cond.And(builder.In("`repository`.lower_name", opts.LowerNames)) } sess := db.GetEngine(ctx) diff --git a/models/user/search.go b/models/user/search.go index 0fa278c257..137df70737 100644 --- a/models/user/search.go +++ b/models/user/search.go @@ -37,6 +37,14 @@ type SearchUserOptions struct { IsProhibitLogin util.OptionalBool IncludeReserved bool + /*** DCS CUSTOMIZATIONS ***/ + IsSpamUser util.OptionalBool + RepoLanguages []string // Find repos that have the given language ids in a repo's manifest + RepoSubjects []string // Find repos that have the given subjects in a repo's manifest + RepoMetadataTypes []string // Find repos that have the given metadata types in a repo's manifest + RepoLanguageIsGL util.OptionalBool // Find repos that are gateway languages + /*** END DCS CUSTOMIZATIONS ***/ + ExtraParamStrings map[string]string } @@ -102,6 +110,12 @@ func (opts *SearchUserOptions) toSearchQueryBase(ctx context.Context) *xorm.Sess cond = cond.And(builder.Eq{"prohibit_login": opts.IsProhibitLogin.IsTrue()}) } + /*** DCS Customizations ***/ + if opts.IsSpamUser.IsTrue() { + cond = cond.And(builder.Expr("type = 0 AND description != '' AND website != ''")) + } + /*** END DCS Customizations ***/ + e := db.GetEngine(ctx) if opts.IsTwoFactorEnabled.IsNone() { return e.Where(cond) @@ -141,6 +155,42 @@ func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _ sessQuery = db.SetSessionPagination(sessQuery, opts) } + /*** DCS Customizations ***/ + if len(opts.RepoLanguages) > 0 || len(opts.RepoSubjects) > 0 || len(opts.RepoMetadataTypes) > 0 { + repoLangsCond := builder.NewCond() + for _, values := range opts.RepoLanguages { + for _, value := range strings.Split(values, ",") { + repoLangsCond = repoLangsCond.Or(builder.Eq{"`door43_metadata`.language": strings.TrimSpace(value)}) + } + } + repoSubsCond := builder.NewCond() + for _, values := range opts.RepoSubjects { + for _, value := range strings.Split(values, ",") { + repoSubsCond = repoSubsCond.Or(builder.Eq{"`door43_metadata`.subject": strings.TrimSpace(value)}) + } + } + repoTypesCond := builder.NewCond() + for _, values := range opts.RepoMetadataTypes { + for _, value := range strings.Split(values, ",") { + repoTypesCond = repoTypesCond.Or(builder.Eq{"`door43_metadata`.metadata_type": strings.TrimSpace(value)}) + } + } + metadataCond := builder.NewCond().And( + repoLangsCond, + repoSubsCond, + repoTypesCond, + ) + if opts.RepoLanguageIsGL != util.OptionalBoolNone { + metadataCond = metadataCond.And(builder.Eq{"`door43_metadata`.is_gl": opts.RepoLanguageIsGL.IsTrue()}) + } + metadataSelect := builder.Select("owner_id"). + From("repository"). 
+ Join("INNER", "`door43_metadata`", "repo_id = `repository`.id"). + Where(metadataCond) + sessQuery.In("`user`.id", metadataSelect) + } + /*** END DCS Customizations ***/ + // the sql may contain JOIN, so we must only select User related columns sessQuery = sessQuery.Select("`user`.*") users = make([]*User, 0, opts.PageSize) diff --git a/models/user_dcs.go b/models/user_dcs.go new file mode 100644 index 0000000000..d3b7f42708 --- /dev/null +++ b/models/user_dcs.go @@ -0,0 +1,38 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package models + +import ( + "context" + + "code.gitea.io/gitea/models/door43metadata" + user_model "code.gitea.io/gitea/models/user" +) + +// GetRepoLanguages gets the languages of the user's repos and returns alphabetized list +func GetRepoLanguages(ctx context.Context, u *user_model.User) []string { + fields, _ := SearchDoor43MetadataField(ctx, &door43metadata.SearchCatalogOptions{ + Owners: []string{u.LowerName}, + Stage: door43metadata.StageLatest, + }, "language") + return fields +} + +// GetRepoSubjects gets the subjects of the user's repos and returns alphabetized list +func GetRepoSubjects(ctx context.Context, u *user_model.User) []string { + fields, _ := SearchDoor43MetadataField(ctx, &door43metadata.SearchCatalogOptions{ + Owners: []string{u.LowerName}, + Stage: door43metadata.StageLatest, + }, "subject") + return fields +} + +// GetRepoMetadataTypes gets the metadata types of the user's repos and returns alphabetized list +func GetRepoMetadataTypes(ctx context.Context, u *user_model.User) []string { + fields, _ := SearchDoor43MetadataField(ctx, &door43metadata.SearchCatalogOptions{ + Owners: []string{u.LowerName}, + Stage: door43metadata.StageLatest, + }, "metadata_type") + return fields +} diff --git a/modules/context/repo.go b/modules/context/repo.go index 9efa2ab3c0..4c1d6d84ff 100644 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -395,6 +395,14 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) { return } + /*** DCS Customizations ***/ + err = repo.LoadLatestDMs(ctx) + if err != nil { + ctx.ServerError("LoadLatestDMs", err) + return + } + /*** END DCS Customizations ***/ + ctx.Repo.Repository = repo ctx.Data["PushMirrors"] = pushMirrors ctx.Data["RepoName"] = ctx.Repo.Repository.Name diff --git a/modules/dcs/books.go b/modules/dcs/books.go new file mode 100644 index 0000000000..26dc14d0a7 --- /dev/null +++ b/modules/dcs/books.go @@ -0,0 +1,191 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package dcs + +import "strconv" + +var BookNames = map[string]string{ //nolint + "frt": "Front Matter", + "bak": "Back Matter", + "gen": "Genesis", + "exo": "Exodus", + "lev": "Leviticus", + "num": "Numbers", + "deu": "Deuteronomy", + "jos": "Joshua", + "jdg": "Judges", + "rut": "Ruth", + "1sa": "1 Samuel", + "2sa": "2 Samuel", + "1ki": "1 Kings", + "2ki": "2 Kings", + "1ch": "1 Chronicles", + "2ch": "2 Chronicles", + "ezr": "Ezra", + "neh": "Nehemiah", + "est": "Esther", + "job": "Job", + "psa": "Psalms", + "pro": "Proverbs", + "ecc": "Ecclesiastes", + "sng": "Song of Solomon", + "isa": "Isaiah", + "jer": "Jeremiah", + "lam": "Lamentations", + "ezk": "Ezekiel", + "dan": "Daniel", + "hos": "Hosea", + "jol": "Joel", + "amo": "Amos", + "oba": "Obadiah", + "jon": "Jonah", + "mic": "Micah", + "nam": "Nahum", + "hab": "Habakkuk", + "zep": "Zephaniah", + "hag": "Haggai", + "zec": "Zechariah", + "mal": "Malachi", + "mat": "Matthew", + "mrk": "Mark", + "luk": "Luke", + "jhn": "John", + "act": "Acts", + "rom": "Romans", + "1co": "1 Corinthians", + "2co": "2 Corinthians", + "gal": "Galatians", + "eph": "Ephesians", + "php": "Philippians", + "col": "Colossians", + "1th": "1 Thessalonians", + "2th": "2 Thessalonians", + "1ti": "1 Timothy", + "2ti": "2 Timothy", + "tit": "Titus", + "phm": "Philemon", + "heb": "Hebrews", + "jas": "James", + "1pe": "1 Peter", + "2pe": "2 Peter", + "1jn": "1 John", + "2jn": "2 John", + "3jn": "3 John", + "jud": "Jude", + "rev": "Revelation", + "obs": "Open Bible Stories", +} + +var BookNumbers = map[string]string{ //nolint + "frt": "A0", + "bak": "B0", + "gen": "01", + "exo": "02", + "lev": "03", + "num": "04", + "deu": "05", + "jos": "06", + "jdg": "07", + "rut": "08", + "1sa": "09", + "2sa": "10", + "1ki": "11", + "2ki": "12", + "1ch": "13", + "2ch": "14", + "ezr": "15", + "neh": "16", + "est": "17", + "job": "18", + "psa": "19", + "pro": "20", + "ecc": "21", + "sng": "22", + "isa": "23", + "jer": "24", + "lam": "25", + "ezk": "26", + "dan": "27", + "hos": "28", + "jol": "29", + "amo": "30", + "oba": "31", + "jon": "32", + "mic": "33", + "nam": "34", + "hab": "35", + "zep": "36", + "hag": "37", + "zec": "38", + "mal": "39", + "mat": "41", + "mrk": "42", + "luk": "43", + "jhn": "44", + "act": "45", + "rom": "46", + "1co": "47", + "2co": "48", + "gal": "49", + "eph": "50", + "php": "51", + "col": "52", + "1th": "53", + "2th": "54", + "1ti": "55", + "2ti": "56", + "tit": "57", + "phm": "58", + "heb": "59", + "jas": "60", + "1pe": "61", + "2pe": "62", + "1jn": "63", + "2jn": "64", + "3jn": "65", + "jud": "66", + "rev": "67", + "obs": "0", +} + +// IsValidBook returns true if string is a valid book or is obs +func IsValidBook(book string) bool { + _, ok := BookNames[book] + return ok +} + +func BookIsOT(book string) bool { + return IsValidBook(book) && BookNumbers[book] > "0" && BookNumbers[book] < "40" +} + +func BookIsNT(book string) bool { + return IsValidBook(book) && BookNumbers[book] > "40" && BookNumbers[book] <= "67" +} + +func GetTestament(book string) string { + if BookIsOT(book) { + return "ot" + } + if BookIsNT(book) { + return "nt" + } + return "" +} + +func GetBookCategories(book string) []string { + testament := GetTestament(book) + if testament != "" { + testament = "bible-" + testament + return []string{testament} + } + return nil +} + +func GetBookSort(book string) int { + var i int + if num, ok := BookNumbers[book]; ok { + i, _ = strconv.Atoi(num) + } + return i +} diff --git a/modules/dcs/files.go b/modules/dcs/files.go 
new file mode 100644 index 0000000000..3f5299babb --- /dev/null +++ b/modules/dcs/files.go @@ -0,0 +1,87 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package dcs + +import ( + "bytes" + "io" + + "code.gitea.io/gitea/modules/charset" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" + + "gopkg.in/yaml.v2" +) + +// ReadFileFromBlob reads a file from a blob and returns the content +func ReadFileFromBlob(blob *git.Blob) ([]byte, error) { + dataRc, err := blob.DataAsync() + if err != nil { + log.Warn("DataAsync Error: %v\n", err) + return nil, err + } + defer dataRc.Close() + + buf := make([]byte, 1024) + n, _ := util.ReadAtMost(dataRc, buf) + buf = buf[:n] + + rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc)) + buf, err = io.ReadAll(rd) + if err != nil { + log.Error("io.ReadAll: %v", err) + return nil, err + } + return buf, nil +} + +// ReadYAMLFromBlob reads a yaml file from a blob and unmarshals it +func ReadYAMLFromBlob(blob *git.Blob) (*map[string]interface{}, error) { + buf, err := ReadFileFromBlob(blob) + if err != nil { + return nil, err + } + + var result *map[string]interface{} + if err := yaml.Unmarshal(buf, &result); err != nil { + log.Error("yaml.Unmarshal: %v", err) + return nil, err + } + if result != nil { + for k, v := range *result { + if val, err := ToStringKeys(v); err != nil { + log.Error("ToStringKeys: %v", err) + } else { + (*result)[k] = val + } + } + } + return result, nil +} + +// ReadJSONFromBlob reads a json file from a blob and unmarshals it +func ReadJSONFromBlob(blob *git.Blob) (*map[string]interface{}, error) { + buf, err := ReadFileFromBlob(blob) + if err != nil { + return nil, err + } + + var result *map[string]interface{} + if err := json.Unmarshal(buf, &result); err != nil { + log.Error("json.Unmarshal: %v", err) + return nil, err + } + if result != nil { + for k, v := range *result { + if val, err := ToStringKeys(v); err != nil { + log.Error("ToStringKeys: %v", err) + } else { + (*result)[k] = val + } + } + } + return result, nil +} diff --git a/modules/dcs/languages.go b/modules/dcs/languages.go new file mode 100644 index 0000000000..5a9c94dfec --- /dev/null +++ b/modules/dcs/languages.go @@ -0,0 +1,131 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package dcs + +import ( + "bytes" + "fmt" + "net/http" + "strings" + "time" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/options" +) + +var ( + _langnamesJSON []map[string]interface{} + _langnamesJSONKeyed map[string]map[string]interface{} +) + +// GetLangnamesJSON returns an array of maps from https://td.door43.org/exports/langnames.json +// Will use custom/options/languages/langnames.json instead if exists +func GetLangnamesJSON() []map[string]interface{} { + if _langnamesJSON == nil { + if langnames, err := GetLangnamesJSONFromCustom(); err == nil && langnames != nil { + _langnamesJSON = langnames + } else { + langnames, err := GetLangnamesJSONFromTD() + if err != nil { + log.Error(err.Error()) + } else { + _langnamesJSON = langnames + } + } + } + return _langnamesJSON +} + +func GetLangnamesJSONFromCustom() ([]map[string]interface{}, error) { + fileBuf, err := options.AssetFS().ReadFile("languages", "langnames.json") + if err != nil { + log.Debug("HERE: %s: %v", fileBuf, err) + return nil, err + } + reader := bytes.NewReader(fileBuf) + langnames := []map[string]interface{}{} + if err := json.NewDecoder(reader).Decode(&langnames); err != nil { + return nil, fmt.Errorf("unable to decode langnames.json from custom/options/languages/langnames.json: %v", err) + } + return langnames, nil +} + +func GetLangnamesJSONFromTD() ([]map[string]interface{}, error) { + langnames := []map[string]interface{}{} + url := "https://td.unfoldingword.org/exports/langnames.json" + myClient := &http.Client{Timeout: 10 * time.Second} + response, err := myClient.Get(url) + if err == nil { + defer response.Body.Close() + if err := json.NewDecoder(response.Body).Decode(&langnames); err != nil { + return nil, fmt.Errorf("unable to decode langnames.json from tD: %v", err) + } + } + return langnames, nil +} + +func GetLangnamesJSONKeyed() map[string]map[string]interface{} { + if _langnamesJSONKeyed == nil { + _langnamesJSONKeyed = map[string]map[string]interface{}{} + langnames := GetLangnamesJSON() + for _, value := range langnames { + _langnamesJSONKeyed[value["lc"].(string)] = value + } + } + return _langnamesJSONKeyed +} + +// GetLanguageFromRepoName determines the language of a repo by its repo name +func GetLanguageFromRepoName(repoName string) string { + parts := strings.Split(strings.ToLower(repoName), "_") + if len(parts) >= 2 && IsValidLanguage(parts[0]) && IsValidResource(parts[1]) { + return parts[0] + } + parts = strings.Split(strings.ToLower(repoName), "-") + if len(parts) == 3 && IsValidLanguage(parts[0]) && (parts[1] == "texttranslation" || parts[2] == "textstories") { + return parts[0] + } + return "" +} + +// IsValidLanguage returns true if string is a valid language code +func IsValidLanguage(lang string) bool { + langnames := GetLangnamesJSONKeyed() + _, ok := langnames[lang] + return ok +} + +// GetLanguageDirection returns the language direction +func GetLanguageDirection(lang string) string { + langnames := GetLangnamesJSONKeyed() + if data, ok := langnames[lang]; ok { + if val, ok := data["ld"].(string); ok { + return val + } + } + return "ltr" +} + +// GetLanguageTitle returns the language title +func GetLanguageTitle(lang string) string { + langnames := GetLangnamesJSONKeyed() + if data, ok := langnames[lang]; ok { + if val, ok := data["ln"].(string); ok { + return val + } + } + return "" +} + +// LanguageIsGL returns true if string is a valid language and is a GL +func LanguageIsGL(lang string) 
bool { + langnames := GetLangnamesJSONKeyed() + if data, ok := langnames[lang]; ok { + if val, ok := data["gw"].(bool); ok { + return val + } + } + return false +} diff --git a/modules/dcs/metadata.go b/modules/dcs/metadata.go new file mode 100644 index 0000000000..a06cfe5d1f --- /dev/null +++ b/modules/dcs/metadata.go @@ -0,0 +1,100 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package dcs + +import ( + "fmt" + "html" + "html/template" + "strings" + + "github.com/sergi/go-diff/diffmatchpatch" +) + +// GetCsvCellDiff returns the diff of two strings +func GetCsvCellDiff(old, new string) template.HTML { + dmp := diffmatchpatch.New() + + diffs := dmp.DiffMain(old, new, false) + diffs = dmp.DiffCleanupSemantic(diffs) + + if len(diffs) == 0 { + return template.HTML(fmt.Sprintf("%s%s", old, new)) + } + + return template.HTML(writeDiffHTML(diffs)) +} + +func writeDiffHTML(diffs []diffmatchpatch.Diff) string { + removedCode := "" + removed := false + addedCode := "" + added := false + + // write the diff + for _, chunk := range diffs { + txt := html.EscapeString(chunk.Text) + txt = strings.ReplaceAll(txt, "\n", "↩\n") + switch chunk.Type { + case diffmatchpatch.DiffInsert: + addedCode += `` + addedCode += txt + addedCode += `` + added = true + case diffmatchpatch.DiffDelete: + removedCode += `` + removedCode += txt + removedCode += `` + removed = true + case diffmatchpatch.DiffEqual: + addedCode += txt + removedCode += txt + } + } + + if added && removed { + return fmt.Sprintf(`

+<div class="removed-code">%s</div>
+<div class="added-code">%s</div>`, removedCode, addedCode)
+	} else if added {
+		return fmt.Sprintf(`<span class="added-code">%s</span>`, addedCode)
+	} else if removed {
+		return fmt.Sprintf(`<span class="removed-code">%s</span>`, removedCode)
+	}
+	return fmt.Sprintf(`<span class="added-code">%s</span>
`, addedCode) +} + +// GetMetadataTypeFromRepoName determines the metadata type of a repo by its repo name format +func GetMetadataTypeFromRepoName(repoName string) string { + parts := strings.Split(strings.ToLower(repoName), "_") + if len(parts) == 2 && IsValidLanguage(parts[0]) && IsValidResource(parts[1]) { + return "rc" + } + if len(parts) == 4 && IsValidLanguage(parts[0]) && IsValidBook(parts[2]) && parts[3] == "book" { + return "tc" + } + if len(parts) == 4 && IsValidLanguage(parts[0]) && IsValidBook(parts[1]) && parts[2] == "text" { + return "ts" + } + parts = strings.Split(strings.ToLower(repoName), "-") + if len(parts) == 3 && IsValidLanguage(parts[0]) && (parts[1] == "textstories" || parts[1] == "texttranslation") { + return "sb" + } + return "" +} + +// GetMetadataVersionFromRepoName returns the default version for each metadata type based on given metadata type +func GetDefaultMetadataVersionForType(metadataType string) string { + if metadataType == "rc" { + return "0.2" + } + if metadataType == "sb" { + return "1.0.0" + } + if metadataType == "tc" { + return "8" + } + if metadataType == "ts" { + return "7" + } + return "" +} diff --git a/modules/dcs/rc02.go b/modules/dcs/rc02.go new file mode 100644 index 0000000000..3bc1097254 --- /dev/null +++ b/modules/dcs/rc02.go @@ -0,0 +1,115 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package dcs + +import ( + "bytes" + "html/template" + "io" + "net/http" + "strings" + + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/options" + + "github.com/santhosh-tekuri/jsonschema/v5" + + _ "github.com/santhosh-tekuri/jsonschema/v5/httploader" // Loader for Schema via HTTP +) + +var rc02Schema *jsonschema.Schema + +// GetRC02Schema returns the schema for RC v0.2 +func GetRC02Schema(reload bool) (*jsonschema.Schema, error) { + githubPrefix := "https://raw.githubusercontent.com/unfoldingWord/rc-schema/master/" + if rc02Schema == nil || reload { + jsonschema.Loaders["https"] = func(url string) (io.ReadCloser, error) { + res, err := http.Get(url) + if err == nil && res != nil && res.StatusCode == 200 { + return res.Body, nil + } + log.Warn("GetRC02Schema: not able to get the schema file remotely [%q]: %v", url, err) + uriPath := strings.TrimPrefix(url, githubPrefix) + fileBuf, err := options.AssetFS().ReadFile("schema", "rc02", uriPath) + if err != nil { + log.Error("GetRC02Schema: local schema file not found: [options/schema/rc02/%s]: %v", uriPath, err) + return nil, err + } + return io.NopCloser(bytes.NewReader(fileBuf)), nil + } + var err error + rc02Schema, err = jsonschema.Compile(githubPrefix + "rc.schema.json") + if err != nil { + return nil, err + } + } + return rc02Schema, nil +} + +// ValidateManifestFileAsHTML validates a manifest file and returns the results as template.HTML +func ValidateManifestFileAsHTML(entry *git.TreeEntry) template.HTML { + var result *jsonschema.ValidationError + if r, err := ValidateManifestTreeEntry(entry); err != nil { + log.Warn("ValidateManifestTreeEntry: %v\n", err) + } else { + result = r + } + return template.HTML(ConvertValidationErrorToHTML(result)) +} + +// ValidateManifestTreeEntry validates a tree entry that is a manifest file and returns the results +func ValidateManifestTreeEntry(entry *git.TreeEntry) (*jsonschema.ValidationError, error) { + if entry == nil { + return nil, nil + } + manifest, err := ReadYAMLFromBlob(entry.Blob()) + if err != nil { + return nil, err + } + return 
ValidateMapByRC02Schema(manifest) +} + +// ValidateMetadataFileAsHTML validates a metadata file and returns the results as template.HTML +func ValidateMetadataFileAsHTML(entry *git.TreeEntry) template.HTML { + var result *jsonschema.ValidationError + if r, err := ValidateMetadataTreeEntry(entry); err != nil { + log.Warn("ValidateManifestTreeEntry: %v\n", err) + } else { + result = r + } + return template.HTML(ConvertValidationErrorToHTML(result)) +} + +// ValidateMetadataTreeEntry validates a tree entry that is a metadata file and returns the results +func ValidateMetadataTreeEntry(entry *git.TreeEntry) (*jsonschema.ValidationError, error) { + if entry == nil { + return nil, nil + } + metadata, err := ReadJSONFromBlob(entry.Blob()) + if err != nil { + return nil, err + } + return ValidateMapBySB100Schema(metadata) +} + +// ValidateMapByRC02Schema Validates a map structure by the RC v0.2.0 schema and returns the result +func ValidateMapByRC02Schema(data *map[string]interface{}) (*jsonschema.ValidationError, error) { + if data == nil { + return &jsonschema.ValidationError{Message: "file cannot be empty"}, nil + } + schema, err := GetRC02Schema(false) + if err != nil { + return nil, err + } + if err = schema.Validate(*data); err != nil { + switch e := err.(type) { + case *jsonschema.ValidationError: + return e, nil + default: + return nil, e + } + } + return nil, nil +} diff --git a/modules/dcs/sc100.go b/modules/dcs/sc100.go new file mode 100644 index 0000000000..0293a341d4 --- /dev/null +++ b/modules/dcs/sc100.go @@ -0,0 +1,151 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package dcs + +import ( + "bytes" + "io" + "net/http" + "strings" + + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/options" + + "github.com/santhosh-tekuri/jsonschema/v5" + + _ "github.com/santhosh-tekuri/jsonschema/v5/httploader" // Loader for Schema via HTTP +) + +var sb100Schema *jsonschema.Schema + +func GetSBDataFromBlob(blob *git.Blob) (*SBMetadata100, error) { + buf, err := ReadFileFromBlob(blob) + if err != nil { + return nil, err + } + sbEncoded := &SBEncodedMetadata{} + if err = json.Unmarshal(buf, sbEncoded); err == nil { + buf = sbEncoded.Data + } + + sb100 := &SBMetadata100{} + if err := json.Unmarshal(buf, sb100); err != nil { + return nil, err + } + + // Now make a generic map of the buffer to store in the database table + sb100.Metadata = &map[string]interface{}{} + if err := json.Unmarshal(buf, sb100.Metadata); err != nil { + return nil, err + } + + return sb100, nil +} + +// GetSB100Schema returns the schema for SB v1.0.0 +func GetSB100Schema(reload bool) (*jsonschema.Schema, error) { + // We must use githubURLPrefix due to certificate issues + burritoBiblePrefix := "https://burrito.bible/schema/" + githubPrefix := "https://raw.githubusercontent.com/bible-technology/scripture-burrito/v1.0.0/schema/" + if sb100Schema == nil || reload { + jsonschema.Loaders["https"] = func(url string) (io.ReadCloser, error) { + uriPath := strings.TrimPrefix(url, burritoBiblePrefix) + githubURL := githubPrefix + uriPath + res, err := http.Get(githubURL) + if err == nil && res != nil && res.StatusCode == 200 { + return res.Body, nil + } + log.Error("GetSB100Schema: not able to get the schema file remotely [%q]: %v", url, err) + fileBuf, err := options.AssetFS().ReadFile("schema", "sb100", uriPath) + if err != nil { + log.Error("GetSB100Schema: local schema file not 
found: [options/schema/sb100/%s]: %v", uriPath, err) + return nil, err + } + return io.NopCloser(bytes.NewReader(fileBuf)), nil + } + var err error + sb100Schema, err = jsonschema.Compile(burritoBiblePrefix + "metadata.schema.json") + if err != nil { + return nil, err + } + } + return sb100Schema, nil +} + +// ValidateMapBySB100Schema Validates a map structure by the RC v0.2.0 schema and returns the result +func ValidateMapBySB100Schema(data *map[string]interface{}) (*jsonschema.ValidationError, error) { + if data == nil { + return &jsonschema.ValidationError{Message: "file cannot be empty"}, nil + } + schema, err := GetSB100Schema(false) + if err != nil { + return nil, err + } + if err = schema.Validate(*data); err != nil { + switch e := err.(type) { + case *jsonschema.ValidationError: + return e, nil + default: + return nil, e + } + } + return nil, nil +} + +type SBEncodedMetadata struct { + Type string `json:"type"` + Data []byte `json:"data"` +} + +type SBMetadata100 struct { + Format string `json:"format"` + Meta SB100Meta `json:"meta"` + Identification SB100Identification `json:"identification"` + Languages []SB100Language `json:"languages"` + Type SB100Type `json:"type"` + LocalizedNames *map[string]SB100LocalizedName `json:"localizedNames"` + Metadata *map[string]interface{} +} + +type SB100Meta struct { + Version string `json:"version"` + DefaultLocal string `json:"defaultLocale"` + DateCreate string `json:"dateCreated"` + Normalization string `json:"normalization:"` +} + +type SB100Identification struct { + Name SB100En `json:"name"` + Abbreviation SB100En `json:"abbreviation"` +} + +type SB100En struct { + En string `json:"en"` +} + +type SB100Language struct { + Tag string `json:"tag"` + Name SB100En `json:"name"` +} + +type SB100Type struct { + FlavorType SB100FlavorType `json:"flavorType"` +} + +type SB100FlavorType struct { + Name string `json:"name"` + Flavor SB100Flavor `json:"flavor"` +} + +type SB100Flavor struct { + Name string `json:"name"` +} + +type SB100LocalizedName struct { + Short SB100En `json:"short"` + Abbr SB100En `json:"abbr"` + Long SB100En `json:"long"` +} diff --git a/modules/dcs/strings.go b/modules/dcs/strings.go new file mode 100644 index 0000000000..a2818148b4 --- /dev/null +++ b/modules/dcs/strings.go @@ -0,0 +1,69 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package dcs + +import ( + "errors" + "fmt" + "strings" +) + +// StringHasSuffix returns bool if str ends in the suffix +func StringHasSuffix(str, suffix string) bool { + return strings.HasSuffix(str, suffix) +} + +// ToStringKeys takes an interface and change it to map[string]interface{} on all levels +func ToStringKeys(val interface{}) (interface{}, error) { + var err error + switch val := val.(type) { + case map[interface{}]interface{}: + m := make(map[string]interface{}) + for k, v := range val { + k, ok := k.(string) + if !ok { + return nil, errors.New("found non-string key") + } + m[k], err = ToStringKeys(v) + if err != nil { + return nil, err + } + } + return m, nil + case map[string]interface{}: + m := make(map[string]interface{}) + for k, v := range val { + m[k], err = ToStringKeys(v) + if err != nil { + return nil, err + } + } + return m, nil + case []interface{}: + l := make([]interface{}, len(val)) + for i, v := range val { + l[i], err = ToStringKeys(v) + if err != nil { + return nil, err + } + } + return l, nil + default: + return val, nil + } +} + +// GetColorFromString gets a hexidecimal number for a color based on string +func GetColorFromString(str string) string { + hash := 0 + for i := 0; i < len(str); i++ { + hash = int(str[i]) + ((hash << 5) - hash) + } + color := "#" + for i := 0; i < 3; i++ { + value := (hash >> (i * 8)) & 0xFF + color += fmt.Sprintf("%02x", value) + } + return color +} diff --git a/modules/dcs/subjects.go b/modules/dcs/subjects.go new file mode 100644 index 0000000000..ed3c4fa5ad --- /dev/null +++ b/modules/dcs/subjects.go @@ -0,0 +1,72 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package dcs + +import ( + "strings" +) + +// ResourceToSubjectMap are the valid subjects keyed by their resource ID +var ResourceToSubjectMap = map[string]string{ + "glt": "Aligned Bible", + "gst": "Aligned Bible", + "obs-sn": "OBS Study Notes", + "obs-sq": "OBS Study Questions", + "obs-tn": "OBS Translation Notes", + "obs-tq": "OBS Translation Questions", + "obs": "Open Bible Stories", + "obs-twl": "TSV OBS Translation Words Links", + "sn": "Study Notes", + "sq": "Study Questions", + "ta": "Translation Academy", + "tl": "Training Library", + "tn": "TSV Translation Notes", + "tq": "TSV Translation Questions", + "tw": "Translation Words", + "twl": "TSV Translation Word Links", + "sn-tsv": "TSV Study Notes", + "sq-tsv": "TSV Study Questions", + "tn-tsv": "TSV Translation Notes", + "tq-tsv": "TSV Translation Questions", + "twl-tsv": "TSV Translation Words Links", + "obs-sn-tsv": "TSV OBS Study Notes", + "obs-sq-tsv": "TSV OBS Study Questions", + "obs-tn-tsv": "TSV OBS Translation Notes", + "obs-tq-tsv": "TSV OBS Translation Questions", + "obs-twl-tsv": "TSV OBS Translation Words Links", + "ult": "Aligned Bible", + "ust": "Aligned Bible", +} + +// GetSubjectFromRepoName determines the subject of a repo by its repo name +func GetSubjectFromRepoName(repoName string) string { + parts := strings.Split(strings.ToLower(repoName), "_") + if len(parts) == 2 && IsValidResource(parts[1]) && IsValidLanguage(parts[0]) { + return ResourceToSubjectMap[parts[1]] + } + if len(parts) == 4 && IsValidLanguage(parts[0]) && IsValidBook(parts[2]) && parts[3] == "book" { + return "Aligned Bible" + } + if len(parts) == 4 && IsValidLanguage(parts[0]) && IsValidBook(parts[1]) && parts[2] == "text" { + if parts[1] == "obs" { + return "Open Bible Stories" + } + return "Bible" + } + parts = strings.Split(repoName, "-") 
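+	// Hyphen-separated names are treated as Scripture Burrito style repos
+	// (e.g. a hypothetical "en-texttranslation-web"): language code first,
+	// then a flavor segment that decides between Bible and Open Bible Stories.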
+ if len(parts) == 3 && IsValidLanguage(parts[0]) { + if parts[1] == "textstories" { + return "Open Bible Stories" + } else if parts[2] == "texttranslation" { + return "Bible" + } + } + return "" +} + +// IsValidResource returns true if it is a valid resource +func IsValidResource(str string) bool { + _, ok := ResourceToSubjectMap[str] + return ok +} diff --git a/modules/dcs/tcts.go b/modules/dcs/tcts.go new file mode 100644 index 0000000000..c828c9be4e --- /dev/null +++ b/modules/dcs/tcts.go @@ -0,0 +1,63 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package dcs + +import ( + "strconv" + "strings" + + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/structs" +) + +func GetTcTsManifestFromBlob(blob *git.Blob) (*structs.TcTsManifest, error) { + buf, err := ReadFileFromBlob(blob) + if err != nil { + return nil, err + } + t := &structs.TcTsManifest{} + err = json.Unmarshal(buf, t) + if err != nil { + return nil, err + } + if t.TcVersion >= 7 { + t.MetadataVersion = strconv.Itoa(t.TcVersion) + t.MetadataType = "tc" + t.Format = "usfm" + t.Subject = "Aligned Bible" + } else if t.TsVersion >= 3 { + t.MetadataVersion = strconv.Itoa(t.TsVersion) + t.MetadataType = "ts" + if t.Resource.ID == "" { + t.Resource.ID = t.ResourceID + } + if t.Resource.Name == "" { + t.Resource.Name = strings.ToUpper(t.Resource.ID) + } + + if t.Project.Name == "" { + t.Project.Name = strings.ToUpper(t.Project.ID) + } + if t.Resource.ID == "obs" { + t.Subject = "Open Bible Stories" + } else { + t.Subject = "Bible" + } + } else { + return nil, nil + } + + if t.Resource.Name != "" { + t.Title = t.Resource.Name + } + if strings.ToLower(t.Resource.ID) != "obs" && t.Project.Name != "" && !strings.Contains(strings.ToLower(t.Title), strings.ToLower(t.Project.Name)) { + if t.Title != "" { + t.Title += " - " + } + t.Title += t.Project.Name + } + + return t, nil +} diff --git a/modules/dcs/valdation.go b/modules/dcs/valdation.go new file mode 100644 index 0000000000..1c1f9292ae --- /dev/null +++ b/modules/dcs/valdation.go @@ -0,0 +1,141 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package dcs + +import ( + "bytes" + "fmt" + "io" + "sort" + "strings" + + "code.gitea.io/gitea/modules/charset" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" + + "github.com/santhosh-tekuri/jsonschema/v5" +) + +// ValidateYAMLFile validates a yaml file +func ValidateYAMLFile(entry *git.TreeEntry) string { + if _, err := ReadYAMLFromBlob(entry.Blob()); err != nil { + return strings.ReplaceAll(err.Error(), " converting YAML to JSON", "") + } + return "" +} + +// ValidateJSONFile validates a json file +func ValidateJSONFile(entry *git.TreeEntry) string { + if err := ValidateJSONFromBlob(entry.Blob()); err != nil { + log.Warn("Error decoding JSON file %s: %v\n", entry.Name(), err) + return fmt.Sprintf("Error reading JSON file %s: %s\n", entry.Name(), err.Error()) + } + return "" +} + +// ConvertValidationErrorToString returns a semi-colon & new line separated string of the validation errors +func ConvertValidationErrorToString(valErr *jsonschema.ValidationError) string { + return convertValidationErrorToString(valErr, nil, "") +} + +func convertValidationErrorToString(valErr, parentErr *jsonschema.ValidationError, padding string) string { + if valErr == nil { + return "" + } + str := padding + if parentErr == nil { + str += fmt.Sprintf("Invalid: %s\n", strings.TrimSuffix(valErr.Message, "#")) + if len(valErr.Causes) > 0 { + str += "* :\n" + } + } else { + loc := "" + if valErr.InstanceLocation != "" { + loc = strings.ReplaceAll(strings.TrimPrefix(strings.TrimPrefix(valErr.InstanceLocation, parentErr.InstanceLocation), "/"), "/", ".") + if loc != "" { + loc = fmt.Sprintf("%s: ", strings.TrimPrefix(loc, "/")) + } + } + str += fmt.Sprintf("* %s%s\n", loc, valErr.Message) + } + sort.Slice(valErr.Causes, func(i, j int) bool { return valErr.Causes[i].InstanceLocation < valErr.Causes[j].InstanceLocation }) + for _, cause := range valErr.Causes { + str += convertValidationErrorToString(cause, valErr, padding+" ") + } + return str +} + +// ConvertValidationErrorToHTML converts a validation error object to an HTML string +func ConvertValidationErrorToHTML(valErr *jsonschema.ValidationError) string { + return convertValidationErrorToHTML(valErr, nil) +} + +func convertValidationErrorToHTML(valErr, parentErr *jsonschema.ValidationError) string { + if valErr == nil { + return "" + } + var label string + var html string + if parentErr == nil { + html = fmt.Sprintf("Invalid: %s\n", strings.TrimSuffix(valErr.Message, "#")) + html += "