# Auto DevOps
# This CI/CD configuration provides a standard pipeline for
# * building a Docker image (using a buildpack if necessary),
# * storing the image in the container registry,
# * running tests from a buildpack,
# * running code quality analysis,
# * creating a review app for each topic branch,
# * and continuous deployment to production
#
# In order to deploy, you must have a Kubernetes cluster configured either
# via a project integration, or via group/project variables.
# AUTO_DEVOPS_DOMAIN must also be set as a variable at the group or project
# level, or manually added below.
#
# If you want to deploy to staging first, or enable canary deploys,
# uncomment the relevant jobs in the pipeline below.
#
# If Auto DevOps fails to detect the proper buildpack, or if you want to
# specify a custom buildpack, set a project variable `BUILDPACK_URL` to the
# repository URL of the buildpack.
# e.g. BUILDPACK_URL=https://github.com/heroku/heroku-buildpack-ruby.git#v142
# If you need multiple buildpacks, add a file to your project called
# `.buildpacks` that contains the URLs, one on each line, in order.
# Note: Auto CI does not work with multiple buildpacks yet
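#
# For illustration only (this example is not part of the original template):
# a `.buildpacks` file selecting two buildpacks, one URL per line and applied
# in order, might look like:
#
#   https://github.com/heroku/heroku-buildpack-nodejs.git
#   https://github.com/heroku/heroku-buildpack-ruby.git#v142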

image: alpine:latest

variables:
  # AUTO_DEVOPS_DOMAIN is the application deployment domain and should be set as a variable at the group or project level.
  # AUTO_DEVOPS_DOMAIN: domain.example.com
  POSTGRES_USER: user
  POSTGRES_PASSWORD: testing-password
  POSTGRES_ENABLED: "true"
  POSTGRES_DB: $CI_ENVIRONMENT_SLUG
  # DOCKER_HOST: /var/run/docker.sock

stages:
  - build
  - test
  - production
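
# Note: only the `production` job below is an active job. `.rspec` is a hidden
# job (leading dot) and nothing is assigned to the `build` stage, so GitLab
# skips those stages.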

.rspec:
  stage: test
  image: ruby:2.4.1
  tags:
    - docker
  services:
    - postgres
    - redis
  variables:
    POSTGRES_DB: aeonvera_test
    POSTGRES_USER: postgres
    POSTGRES_PASSWORD: postgres
    DATABASE_HOST: postgres
    DATABASE_NAME: aeonvera_test
    DATABASE_USERNAME: postgres
    DATABASE_PASSWORD: postgres
    REDIS_URL: redis://redis:6379
  before_script:
    - |
      time (
        curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
        chmod +x ./cc-test-reporter
        ./cc-test-reporter before-build
      )
  script:
    - gem install bundler
    - gem install therubyracer
    - gem install execjs
    - apt-get update
    - apt-get install -y nodejs
    - bundle install
    - bundle exec rake parallel:setup
    - COVERAGE=true bundle exec parallel_rspec app/ spec/
  after_script:
    - time ./cc-test-reporter after-build
  artifacts:
    paths:
      - coverage/
  only:
    - branches
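
# A hedged sketch, not present in the original file: because `.rspec` is a
# hidden job template, a concrete job is needed to actually run it, for
# example via `extends` (the job name `rspec` here is only an illustration):
#
# rspec:
#   extends: .rspec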

# test:
#   stage: test
#   image: tmaier/docker-compose
#   # services:
#   #   - docker:dind
#   script:
#     - scripts/ci/test.sh
#     - docker-compose -f docker-compose.ci.yml down --remove-orphans
#   only:
#     - branches

# codequality:
#   image: docker:latest
#   variables:
#     DOCKER_DRIVER: overlay2
#   allow_failure: true
#   services:
#     - docker:dind
#   script:
#     - codeclimate
#   artifacts:
#     paths: [codeclimate.json]

# This job continuously deploys to production on every push to `master`.
# To make this a manual process, either because you're enabling `staging`
# or `canary` deploys, or because you simply want more control over when you
# deploy to production, uncomment the `when: manual` line in the `production` job.
production:
  stage: production
  # For a smaller base image:
  #   image: docker:git
  # However, that image is Alpine-based, which nanobox currently doesn't
  # support: https://github.com/nanobox-io/nanobox/issues/645
  image: docker:git
  services:
    - docker:dind
  variables:
    DOCKER_DRIVER: overlay2
  script:
    - ensure_system_dependencies_are_installed
    - print_system_info
    - setup_docker
    - install_nanobox
    - deploy
  environment:
    name: production
    url: https://aeonvera.com
  # only:
  #   refs:
  #     - master
  # when: manual

# ---------------------------------------------------------------------------

.auto_devops: &auto_devops |
  # Auto DevOps variables and functions
  [[ "$TRACE" ]] && set -x
  export CI_APPLICATION_REPOSITORY=$CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG
  export CI_APPLICATION_TAG=$CI_COMMIT_SHA

  function codeclimate() {
    cc_opts="--env CODECLIMATE_CODE="$PWD" \
             --volume "$PWD":/code \
             --volume /var/run/docker.sock:/var/run/docker.sock \
             --volume /tmp/cc:/tmp/cc"

    docker run ${cc_opts} codeclimate/codeclimate init
    docker run ${cc_opts} codeclimate/codeclimate analyze -f json > codeclimate.json
  }

  function ensure_system_dependencies_are_installed() {
    apk add --no-cache bash curl
  }

  function print_system_info() {
    uname -a
    cat /etc/*-release
    echo "systemd $(which systemd)"
    echo "upstart $(which upstart)"
    echo "systemctl $(which systemctl)"
    echo "initctl $(which initctl)"
  }

  function setup_docker() {
    if ! docker info &>/dev/null; then
      if [ -z "$DOCKER_HOST" -a "$KUBERNETES_PORT" ]; then
        export DOCKER_HOST='tcp://localhost:2375'
      fi
    fi
    echo "docker is located at $(which docker)"
    docker info
  }

  # These are expected to be provided as CI/CD variables on the project:
  # NANOBOX_APP_NAME
  # NANOBOX_USERNAME
  # NANOBOX_PASSWORD
  function install_nanobox() {
    # wget https://s3.amazonaws.com/tools.nanobox.io/nanobox/v2/linux/amd64/nanobox
    wget https://d1ormdui8qdvue.cloudfront.net/installers/v2/linux/nanobox-2.tar.gz
    mkdir -p nanobox-archive
    tar -xf nanobox-2.tar.gz -C ./nanobox-archive
    cp ./nanobox-archive/usr/local/bin/* /usr/local/bin/
    rm -rf ./nanobox-archive
    echo "nanobox is located at $(which nanobox)"

    nanobox config set ci-mode true
    nanobox config set provider native
    nanobox remote add $NANOBOX_APP_NAME
    echo "Nanobox is ready to go!"
  }

  function deploy() {
    set -x
    # Dump the nanobox log and fail the job only if the deploy itself fails.
    if ! nanobox deploy --debug --trace --verbose; then
      cat ~/.nanobox/nanobox.log
      exit 1
    fi
  }
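
# The `before_script` below injects the shell functions defined in the
# `.auto_devops` anchor into every job that doesn't define its own
# `before_script` (here, that's the `production` job; `.rspec` overrides it).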
before_script:
  - *auto_devops