This repository has been archived by the owner on Oct 8, 2024. It is now read-only.

Commit 18416b2: initial capability (#2)

anthonywendt authored Aug 9, 2023
1 parent aa4faf0 commit 18416b2
Showing 25 changed files with 2,620 additions and 0 deletions.
18 changes: 18 additions & 0 deletions .gitignore
@@ -0,0 +1,18 @@
.cache/
.idea/
.vscode/
build/
.DS_Store
*.tar.zst
test/tf/public-ec2-instance/.tool-versions
zarf-sbom

# Terraform
test/tf/public-ec2-instance/.test-data
test/tf/public-ec2-instance/.terraform
terraform.tfstate
terraform.tfstate.backup
.terraform.lock.hcl

# SOPS stuff that should never be committed to the repo
secret-sops-gpg.yaml
53 changes: 53 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,53 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: check-added-large-files
args: ["--maxkb=1024"]
- id: check-merge-conflict
- id: detect-aws-credentials
args:
- "--allow-missing-credentials"
- id: detect-private-key
exclude: |
(?x)^(
kustomizations/bigbang/environment-bb/values-bigbang.enc.yaml
)$
- id: end-of-file-fixer
exclude: "^kustomizations/bigbang/vendor/.*$"
- id: fix-byte-order-marker
- id: trailing-whitespace
exclude: "^kustomizations/bigbang/vendor/.*$"
args: [--markdown-linebreak-ext=md]
- id: check-yaml
exclude: |
(?x)^(
charts/raw/templates/resources.yaml
)$
args:
- "--allow-multiple-documents"
- repo: https://github.com/sirosen/fix-smartquotes
rev: 0.2.0
hooks:
- id: fix-smartquotes
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.23.0
hooks:
- id: check-jsonschema
name: "Validate Zarf Configs Against Schema"
files: "zarf.yaml"
types: [yaml]
args:
[
"--schemafile",
"https://raw.githubusercontent.com/defenseunicorns/zarf/v0.28.0/zarf.schema.json",
"--no-cache"
]
- repo: https://github.com/golangci/golangci-lint
rev: v1.52.2
hooks:
- id: golangci-lint
- repo: https://github.com/renovatebot/pre-commit-hooks
rev: 35.105.1
hooks:
- id: renovate-config-validator
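
If you want to run these hooks outside the build-harness container (the Makefile's `run-pre-commit-hooks` target wraps them in Docker), a minimal local sketch, assuming `pre-commit` is installed at the version pinned in `.tool-versions`:

```bash
# Install the git hook so the checks run automatically on each commit
pre-commit install

# Or run every configured hook against the whole repo once,
# mirroring what `make run-pre-commit-hooks` does inside the container
pre-commit run --all-files
```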
7 changes: 7 additions & 0 deletions .tool-versions
@@ -0,0 +1,7 @@
golang 1.20.4
golangci-lint 1.52.2
pre-commit 3.3.2
terraform 1.4.6
tflint 0.46.1
tfsec 1.28.1
sops 3.7.3
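
These pins are what `asdf install` picks up inside the build-harness container (see the Makefile targets below). A minimal sketch for reproducing the same toolchain locally, assuming asdf itself is already installed:

```bash
# Add a plugin for each pinned tool, then install the exact versions above
for tool in golang golangci-lint pre-commit terraform tflint tfsec sops; do
  asdf plugin add "$tool" || true   # skip plugins that are already added
done
asdf install   # reads the .tool-versions file in the repo root
```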
186 changes: 186 additions & 0 deletions Makefile
@@ -0,0 +1,186 @@
# The version of Zarf to use. To keep this repo as portable as possible the Zarf binary will be downloaded and added to
# the build folder.
# renovate: datasource=github-tags depName=defenseunicorns/zarf
ZARF_VERSION := v0.28.4

# The version of the build harness container to use
BUILD_HARNESS_REPO := ghcr.io/defenseunicorns/build-harness/build-harness
# renovate: datasource=docker depName=ghcr.io/defenseunicorns/build-harness/build-harness
BUILD_HARNESS_VERSION := 1.10.2
# renovate: datasource=docker depName=ghcr.io/defenseunicorns/packages/dubbd-k3d extractVersion=^(?<version>\d+\.\d+\.\d+)
DUBBD_K3D_VERSION := 0.6.1

# Figure out which Zarf binary we should use based on the operating system we are on
ZARF_BIN := zarf
UNAME_S := $(shell uname -s)
UNAME_P := $(shell uname -p)
ifneq ($(UNAME_S),Linux)
ifeq ($(UNAME_S),Darwin)
ZARF_BIN := $(addsuffix -mac,$(ZARF_BIN))
endif
ifeq ($(UNAME_P),i386)
ZARF_BIN := $(addsuffix -intel,$(ZARF_BIN))
endif
ifeq ($(UNAME_P),arm64)
ZARF_BIN := $(addsuffix -apple,$(ZARF_BIN))
endif
endif

# Silent mode by default. Run `make VERBOSE=1` to turn off silent mode.
ifndef VERBOSE
.SILENT:
endif

# Optionally add the "-it" flag for docker run commands if the env var "CI" is not set (meaning we are on a local machine and not in github actions)
TTY_ARG :=
ifndef CI
TTY_ARG := -it
endif

.DEFAULT_GOAL := help

# Idiomatic way to force a target to always run, by having it depend on this dummy target
FORCE:

.PHONY: help
help: ## Show a list of all targets
grep -E '^\S*:.*##.*$$' $(MAKEFILE_LIST) \
| sed -n 's/^\(.*\): \(.*\)##\(.*\)/\1:\3/p' \
| column -t -s ":"

########################################################################
# Utility Section
########################################################################

.PHONY: docker-save-build-harness
docker-save-build-harness: ## Pulls the build harness docker image and saves it to a tarball
mkdir -p .cache/docker
docker pull $(BUILD_HARNESS_REPO):$(BUILD_HARNESS_VERSION)
docker save -o .cache/docker/build-harness.tar $(BUILD_HARNESS_REPO):$(BUILD_HARNESS_VERSION)

.PHONY: docker-load-build-harness
docker-load-build-harness: ## Loads the saved build harness docker image
docker load -i .cache/docker/build-harness.tar

.PHONY: run-pre-commit-hooks
run-pre-commit-hooks: ## Run all pre-commit hooks. Returns nonzero exit code if any hooks fail. Uses Docker for maximum compatibility
mkdir -p .cache/pre-commit
docker run --rm -v "${PWD}:/app" --workdir "/app" -e "PRE_COMMIT_HOME=/app/.cache/pre-commit" $(BUILD_HARNESS_REPO):$(BUILD_HARNESS_VERSION) bash -c 'git config --global --add safe.directory /app && asdf install && pre-commit run -a'

.PHONY: fix-cache-permissions
fix-cache-permissions: ## Fixes the permissions on the pre-commit cache
docker run --rm -v "${PWD}:/app" --workdir "/app" -e "PRE_COMMIT_HOME=/app/.cache/pre-commit" $(BUILD_HARNESS_REPO):$(BUILD_HARNESS_VERSION) chmod -R a+rx .cache

########################################################################
# Test Section
########################################################################

.PHONY: test
test: ## Run all automated tests. Requires access to an AWS account. Costs money. Requires env vars "REPO_URL", "GIT_BRANCH", "REGISTRY1_USERNAME", "REGISTRY1_PASSWORD", "GHCR_USERNAME", "GHCR_PASSWORD" and standard AWS env vars.
mkdir -p .cache/go
mkdir -p .cache/go-build
echo "Running automated tests. This will take several minutes. At times it does not log anything to the console. If you interrupt the test run you will need to log into AWS console and manually delete any orphaned infrastructure."
docker run $(TTY_ARG) --rm \
-v "${PWD}:/app" \
-v "${PWD}/.cache/go:/root/go" \
-v "${PWD}/.cache/go-build:/root/.cache/go-build" \
--workdir "/app/test/e2e" \
-e GOPATH=/root/go \
-e GOCACHE=/root/.cache/go-build \
-e REPO_URL \
-e GIT_BRANCH \
-e REGISTRY1_USERNAME \
-e REGISTRY1_PASSWORD \
-e GHCR_USERNAME \
-e GHCR_PASSWORD \
-e AWS_REGION \
-e AWS_DEFAULT_REGION \
-e AWS_ACCESS_KEY_ID \
-e AWS_SECRET_ACCESS_KEY \
-e AWS_SESSION_TOKEN \
-e AWS_SECURITY_TOKEN \
-e AWS_SESSION_EXPIRATION \
-e SKIP_SETUP -e SKIP_TEST \
-e SKIP_TEARDOWN \
$(BUILD_HARNESS_REPO):$(BUILD_HARNESS_VERSION) \
bash -c 'asdf install && go test -v -timeout 2h -p 1 ./...'

.PHONY: test-ssh
test-ssh: ## Run this if you set SKIP_TEARDOWN=1 and want to SSH into the still-running test server. Don't forget to unset SKIP_TEARDOWN when you're done
cd test/tf/public-ec2-instance && terraform init
cd test/tf/public-ec2-instance/.test-data && cat Ec2KeyPair.json | jq -r .PrivateKey > privatekey.pem && chmod 600 privatekey.pem
cd test/tf/public-ec2-instance && ssh -i .test-data/privatekey.pem ubuntu@$$(terraform output public_instance_ip | tr -d '"')

########################################################################
# Cluster Section
########################################################################

cluster/full: cluster/destroy cluster/create build/all deploy/all ## This will destroy any existing cluster, create a new one, then build and deploy all

cluster/create: ## Create a k3d cluster with metallb installed
k3d cluster create k3d-test-cluster --config utils/k3d/k3d-config.yaml -v /etc/machine-id:/etc/machine-id@server:*
k3d kubeconfig merge k3d-test-cluster -o /home/${USER}/cluster-kubeconfig.yaml
echo "Installing Calico..."
kubectl apply --wait=true -f https://k3d.io/v5.5.2/usage/advanced/calico.yaml 2>&1 >/dev/null
echo "Waiting for Calico to be ready..."
kubectl rollout status deployment/calico-kube-controllers -n kube-system --watch --timeout=90s 2>&1 >/dev/null
kubectl rollout status daemonset/calico-node -n kube-system --watch --timeout=90s 2>&1 >/dev/null
kubectl wait --for=condition=Ready pods --all --all-namespaces 2>&1 >/dev/null
echo
utils/metallb/install.sh
echo "Cluster is ready!"

cluster/destroy: ## Destroy the k3d cluster
k3d cluster delete k3d-test-cluster

########################################################################
# Build Section
########################################################################

build/all: build build/zarf build/zarf-init.sha256 build/dubbd-pull-k3d.sha256 build/uds-capability-confluence ## Build everything

build: ## Create build directory
mkdir -p build

.PHONY: clean
clean: ## Clean up build files
rm -rf ./build

build/zarf: | build ## Download the Linux flavor of Zarf to the build dir
echo "Downloading zarf"
curl -sL https://github.com/defenseunicorns/zarf/releases/download/$(ZARF_VERSION)/zarf_$(ZARF_VERSION)_Linux_amd64 -o build/zarf
chmod +x build/zarf

build/zarf-mac-intel: | build ## Download the Mac (Intel) flavor of Zarf to the build dir
echo "Downloading zarf-mac-intel"
curl -sL https://github.com/defenseunicorns/zarf/releases/download/$(ZARF_VERSION)/zarf_$(ZARF_VERSION)_Darwin_amd64 -o build/zarf-mac-intel
chmod +x build/zarf-mac-intel

build/zarf-init.sha256: | build ## Download the init package
echo "Downloading zarf-init-amd64-$(ZARF_VERSION).tar.zst"
curl -sL https://github.com/defenseunicorns/zarf/releases/download/$(ZARF_VERSION)/zarf-init-amd64-$(ZARF_VERSION).tar.zst -o build/zarf-init-amd64-$(ZARF_VERSION).tar.zst
echo "Creating shasum of the init package"
shasum -a 256 build/zarf-init-amd64-$(ZARF_VERSION).tar.zst | awk '{print $$1}' > build/zarf-init.sha256

build/dubbd-pull-k3d.sha256: | build ## Download dubbd k3d oci package
./build/zarf package pull oci://ghcr.io/defenseunicorns/packages/dubbd-k3d:$(DUBBD_K3D_VERSION)-amd64 --oci-concurrency 9 --output-directory build
echo "Creating shasum of the dubbd-k3d package"
shasum -a 256 build/zarf-package-dubbd-k3d-amd64-$(DUBBD_K3D_VERSION).tar.zst | awk '{print $$1}' > build/dubbd-pull-k3d.sha256

build/uds-capability-confluence: | build ## Build the confluence capability
build/zarf package create . --skip-sbom --confirm --output-directory build

########################################################################
# Deploy Section
########################################################################

deploy/all: deploy/init deploy/dubbd-k3d deploy/uds-capability-confluence ## Deploy everything

deploy/init: ## Deploy the zarf init package
./build/zarf init --confirm --components=git-server

deploy/dubbd-k3d: ## Deploy the k3d flavor of DUBBD
cd ./build && ./zarf package deploy zarf-package-dubbd-k3d-amd64-$(DUBBD_K3D_VERSION).tar.zst --confirm

deploy/uds-capability-confluence: ## Deploy the confluence capability
cd ./build && ./zarf package deploy zarf-package-confluence-*.tar.zst --confirm
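
A typical local workflow with these targets might look like the sketch below, assuming Docker, k3d, and kubectl are installed and the registry logins from the README are done:

```bash
# Run the same lint/format checks CI runs
make run-pre-commit-hooks

# Destroy any old cluster, create a fresh k3d cluster, then build and deploy
# the Zarf init package, DUBBD, and the Confluence capability
make cluster/full

# Tear the cluster back down when finished
make cluster/destroy
```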
52 changes: 52 additions & 0 deletions README.md
@@ -1,2 +1,54 @@
# uds-capability-confluence
Platform One Confluence deployed via Flux

## Pre-req
- Minimum compute requirements for a single-node deployment are at least 64 GB RAM and 32 virtual CPU threads (an AWS `m6i.8xlarge` instance type should do)
- k3d installed on the machine

## Deploy

### Use Zarf to log in to the needed registries, i.e. registry1.dso.mil and ghcr.io

```bash
# Download Zarf
make build/zarf

# Log in to the registries
set +o history

# registry1.dso.mil (To access registry1 images needed during build time)
export REGISTRY1_USERNAME="YOUR-USERNAME-HERE"
export REGISTRY1_TOKEN="YOUR-TOKEN-HERE"
echo $REGISTRY1_TOKEN | build/zarf tools registry login registry1.dso.mil --username $REGISTRY1_USERNAME --password-stdin

# ghcr.io (To access the needed OCI packages)
export GH_USERNAME="YOUR-USERNAME-HERE"
export GH_TOKEN="YOUR-TOKEN-HERE"
echo $GH_TOKEN | build/zarf tools registry login ghcr.io --username $GH_USERNAME --password-stdin

set -o history
```

### Deploy Everything

```bash
# This destroys any existing cluster, creates a compatible k3d cluster, then runs
# 'make build/all' and 'make deploy/all'. Follow the breadcrumbs in the Makefile to
# see what it does and how; the individual targets it chains are shown below.
make cluster/full
```
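
If you would rather run the steps `cluster/full` chains together one at a time, the equivalent sequence is:

```bash
make cluster/destroy   # remove any existing k3d test cluster
make cluster/create    # create a fresh k3d cluster with Calico and MetalLB
make build/all         # download Zarf, the init package, DUBBD, and build the Confluence package
make deploy/all        # zarf init, deploy DUBBD, then deploy the Confluence capability
```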

## Import Zarf Skeleton
Below is an example of how to import this project's Zarf skeleton into your zarf.yaml. The [uds-package-software-factory](https://github.com/defenseunicorns/uds-package-software-factory.git) does this with a subset of the uds-capability projects.

```yaml
components:
- name: values
required: true
files:
- source: <path-to-the-values-you-want-to-use>
target: values-confluence.yaml
- name: confluence
required: true
import:
name: confluence
url: oci://ghcr.io/defenseunicorns/uds-capability/confluence:0.0.1-skeleton
```
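
Once your consuming zarf.yaml contains an import like the one above, building it is an ordinary package create; Zarf pulls the skeleton from the OCI reference at create time. A minimal sketch, assuming you downloaded Zarf with `make build/zarf` (adjust the path to wherever your Zarf binary lives):

```bash
# Create the consuming package; the oci:// import is resolved during create
build/zarf package create . --confirm
```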
13 changes: 13 additions & 0 deletions confluence-flux-values.yaml
@@ -0,0 +1,13 @@
application:
name: confluence
createNamespace: ###ZARF_VAR_CONFLUENCE_CREATE_NAMESPACE###
namespace: confluence
path: chart
repository: https://repo1.dso.mil/big-bang/product/community/confluence.git
ref:
# renovate: datasource=gitlab-tags depName=big-bang/product/community/confluence versioning=loose registryUrl=https://repo1.dso.mil
tag: 1.10.0-bb.3
values: |
###ZARF_VAR_CONFLUENCE_VALUES###
dependsOn:
###ZARF_VAR_CONFLUENCE_DEPENDS_ON###
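
The `###ZARF_VAR_...###` markers are replaced by Zarf at deploy time with variables declared in this package's zarf.yaml. A hypothetical sketch of what those declarations could look like (the names match the markers above; the defaults are illustrative only, not taken from this repo):

```yaml
variables:
  - name: CONFLUENCE_CREATE_NAMESPACE
    default: "true"
  - name: CONFLUENCE_VALUES
    default: ""
  - name: CONFLUENCE_DEPENDS_ON
    default: ""
```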