image: python:3.8

include:
  - project: cs/gitlabci-templates
    file: /build-image-using-kaniko.yml
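# The included template provides the hidden `.build-image-using-kaniko` job
# that deploy-sphinx-documentation extends in the deploy stage.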

stages:
  - unit_test
  - register
  - integration_test
  - generate
  - deploy

variables:
  KUBERNETES_HELPER_MEMORY_REQUEST: 512Mi
  KUBERNETES_HELPER_MEMORY_LIMIT: 1Gi
  ATLAS_PARCELLATION_ONTOLOGY_ID_STAGING: "https://bbp.epfl.ch/neurosciencegraph/data/ontologies/34388d3b-0b88-4deb-9686-6fcd9ef8990e"
  ATLAS_PARCELLATION_ONTOLOGY_TAG_STAGING: "v1.0.1"
  ATLAS_PARCELLATION_ONTOLOGY_ID_PROD: "https://bbp.epfl.ch/neurosciencegraph/data/0518dd0b-cbc7-43ef-a75f-45631059c8c5"
  ATLAS_PARCELLATION_ONTOLOGY_TAG_PROD: "v1.1.0"
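# The staging/production ID and tag pairs above are picked up by the jobs below
# (per environment) and passed to the registration, view-release and test scripts.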

# TEST STAGE
# Runs when a push is made in a merge request and an ontology file has changed,
# when a commit is pushed to the develop branch and an ontology file has changed,
# or when a new tag is pushed
test-ontologies-on-merge-request:
  stage: unit_test
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      when: always
  variables:
    atlas_parcellation_ontology: '$ATLAS_PARCELLATION_ONTOLOGY_ID_STAGING'
    environment: staging
    atlas_parcellation_ontology_tag: $ATLAS_PARCELLATION_ONTOLOGY_TAG_STAGING
    tag: "-"
    token: $NEXUS_TOKEN_STAGING
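  # The "-" value for `tag` is presumably a placeholder meaning "no release tag";
  # the tag-triggered rules below pass $CI_COMMIT_TAG instead.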
  script:
    - flake8 tests bmo scripts/register_ontologies.py
    - flake8 tests bmo scripts/release_views.py
    - >
      pytest tests/pre_registration
      --environment $environment
      --atlas_parcellation_ontology $atlas_parcellation_ontology
      --atlas_parcellation_ontology_tag $atlas_parcellation_ontology_tag
      --token $token
      --tag $tag
    - >
      python scripts/register_ontologies.py
      --environment $environment
      --atlas_parcellation_ontology $atlas_parcellation_ontology
      --atlas_parcellation_ontology_tag $atlas_parcellation_ontology_tag
      --token $token
      --tag $tag
      --no_data_update True
  before_script:
    - pip install .[dev]
    - echo "Using atlas_parcellation_ontology ID $atlas_parcellation_ontology at tag $atlas_parcellation_ontology_tag"
    - python scripts/clean_ontology_files.py
      --environment $environment
      --atlas_parcellation_ontology $atlas_parcellation_ontology
      --atlas_parcellation_ontology_tag $atlas_parcellation_ontology_tag
      --token $token
      --tag $tag
      --no_data_update True

# REGISTER STAGE
register_to_environment:
  stage: register
  rules:
    # Runs when a commit lands on the develop branch (e.g. when a merge request is merged)
    # and an ontology-related file has changed
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      changes:
        - ontologies/bbp/*.ttl
        - shapes/**/*.json
        - jsonldcontext/*.json
        - texts/*.json
        - scripts/register_ontologies.py
        - bmo/*.py
        - .gitlab-ci.yml
      when: on_success
      variables:
        atlas_parcellation_ontology: '$ATLAS_PARCELLATION_ONTOLOGY_ID_STAGING'
        environment: staging
        atlas_parcellation_ontology_tag: $ATLAS_PARCELLATION_ONTOLOGY_TAG_STAGING
        tag: "-"
        token: $NEXUS_TOKEN_STAGING
    # Runs only when a new tag is pushed.
    # Registers the ontology files changed between this version and the previous one
    - if: $CI_COMMIT_TAG
      when: on_success
      variables:
        atlas_parcellation_ontology: $ATLAS_PARCELLATION_ONTOLOGY_ID_PROD
        environment: production
        atlas_parcellation_ontology_tag: $ATLAS_PARCELLATION_ONTOLOGY_TAG_PROD
        tag: $CI_COMMIT_TAG
        token: $NEXUS_TOKEN_PRODUCTION
  script:
    - echo "In environment '$environment', using atlas_parcellation_ontology ID $atlas_parcellation_ontology at tag $atlas_parcellation_ontology_tag"
    - >
      python scripts/register_ontologies.py
      --environment $environment
      --atlas_parcellation_ontology $atlas_parcellation_ontology
      --atlas_parcellation_ontology_tag $atlas_parcellation_ontology_tag
      --token $token
      --tag $tag
    - echo "environment=$environment" >> register.env
    - echo "atlas_parcellation_ontology=$atlas_parcellation_ontology" >> register.env
    - echo "atlas_parcellation_ontology_tag=$atlas_parcellation_ontology_tag" >> register.env
    - echo "token=$token" >> register.env
    - echo "tag=$tag" >> register.env
  artifacts:
    reports:
      dotenv: register.env
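  # The dotenv report re-exports these values as variables for later jobs
  # (e.g. the post-registration integration tests).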
  before_script:
    - pip install .[dev]

create_views:
  stage: register
  rules:
    # Runs only when a new tag is pushed.
    # Creates the aggregated view for OBP Explore for this release
    - if: $CI_COMMIT_TAG
      when: on_success
      variables:
        atlas_parcellation_ontology: $ATLAS_PARCELLATION_ONTOLOGY_ID_PROD
        environment: production
        atlas_parcellation_ontology_tag: $ATLAS_PARCELLATION_ONTOLOGY_TAG_PROD
        tag: $CI_COMMIT_TAG
        token: $NEXUS_TOKEN_PRODUCTION
  script:
    - echo "Using atlas tag $atlas_parcellation_ontology_tag and tag $CI_COMMIT_TAG to create an aggregated view for OBP Explore"
    - >
      python scripts/release_views.py
      --environment $environment
      --atlas_parcellation_ontology $atlas_parcellation_ontology
      --atlas_parcellation_ontology_tag $atlas_parcellation_ontology_tag
      --token $token
      --tag $tag
  before_script:
    - pip install .[dev]

# INTEGRATION TESTS
test-ontologies-on-registration:
  stage: integration_test
  rules:
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      changes:
        - tests/*
      when: on_success
    - if: '($CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop") || $CI_COMMIT_TAG'
      changes:
        - ontologies/bbp/*.ttl
        - shapes/**/*.json
        - jsonldcontext/*.json
        - texts/*.json
        - scripts/register_ontologies.py
        - bmo/*.py
        - .gitlab-ci.yml
      needs: ["register_to_environment"]
      when: on_success
  before_script:
    - pip install .[dev]
  script:
    - >
      pytest tests/post_registration
      --environment $environment
      --atlas_parcellation_ontology $atlas_parcellation_ontology
      --atlas_parcellation_ontology_tag $atlas_parcellation_ontology_tag
      --token $token
      --tag $tag

# GENERATE STAGE
# Builds the Sphinx and ontodocs documentation.
# The Sphinx docs are stored as artifacts so they can be used in the deploy stage;
# the ontodocs files are copied directly to the bmo-ontodocs pod (see the script below).
generate-ontodocs-documentation:
  stage: generate
  image: python:3.8
  rules:
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      when: never
      variables:
        CLUSTER_NAME: dev_cluster
        K8S_SERVER: https://kubernetesdev.bbp.epfl.ch:6443
        SA_TOKEN: $SA_TOKEN_DEVELOPMENT
        K8S_CA: ".kube/ca-dev.crt"
    - if: $CI_COMMIT_TAG
      when: never
      variables:
        CLUSTER_NAME: prod_cluster
        K8S_SERVER: https://kubernetes.bbp.epfl.ch:6443/
        SA_TOKEN: $SA_TOKEN_PRODUCTION
        K8S_CA: ".kube/ca-prod.crt"
  before_script:
    - curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
    - chmod +x kubectl
    - mkdir -p ~/.local/bin
    - mv ./kubectl ~/.local/bin/kubectl
    - pip install git+https://$GITLAB_DEPLOY_TOKEN:[email protected]/dke/apps/ontodocs.git
  script:
    - mkdir visualization
    - ontospy gendocs ./ontologies/bbp --theme lumen --outputpath ./visualization --title "Brain Modeling Ontology" --preflabel "label" -v
    - ~/.local/bin/kubectl config set-cluster $CLUSTER_NAME --server=$K8S_SERVER --certificate-authority=$K8S_CA
    - ~/.local/bin/kubectl config set-context $CLUSTER_NAME --cluster=$CLUSTER_NAME
    - ~/.local/bin/kubectl config set-credentials pod-creator-ci --token=$SA_TOKEN
    - ~/.local/bin/kubectl config set-context $CLUSTER_NAME --user=pod-creator-ci
    - ~/.local/bin/kubectl config use-context $CLUSTER_NAME
    - POD=$(~/.local/bin/kubectl get pods -n bbp-ou-dke --selector=app=bmo-ontodocs -o=jsonpath="{.items[*].metadata.name}" | cut -d ' ' -f 1)
    - ~/.local/bin/kubectl cp ./visualization/. "$POD:/usr/share/nginx/html/" -n bbp-ou-dke --no-preserve
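  # The generated site is copied straight into the web root of the running
  # bmo-ontodocs pod rather than being published as a CI artifact.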
  variables:
    KUBERNETES_MEMORY_LIMIT: 4Gi
    KUBERNETES_MEMORY_REQUEST: 4Gi

# Generates the Sphinx documentation and stores the artifact under generated/html
generate-sphinx-documentation:
  stage: generate
  image: python:3.8
  rules:
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      when: on_success
    - if: $CI_COMMIT_TAG
      when: on_success
  script:
    - pip install .[docs]
    - sphinx-build -T --keep-going -b html -d _build/doctrees -c ./docs/source -D language=en ./docs/source generated/html
  artifacts:
    paths:
      - generated/html
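  # deploy-sphinx-documentation picks this artifact up via `dependencies` so the
  # documentation image build can include it (see the GENERATED_DOCS_PATH build arg below).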
  variables:
    KUBERNETES_MEMORY_LIMIT: 4Gi
    KUBERNETES_MEMORY_REQUEST: 4Gi

# DEPLOY STAGE
# Deploys the project documentation
deploy-sphinx-documentation:
  stage: deploy
  extends: .build-image-using-kaniko
  dependencies:
    - generate-sphinx-documentation
  rules:
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      when: on_success
      variables:
        CI_REGISTRY_IMAGE: $CI_REGISTRY_IMAGE/sphinx-documentation-staging
    - if: $CI_COMMIT_TAG
      when: on_success
      variables:
        CI_REGISTRY_IMAGE: $CI_REGISTRY_IMAGE/sphinx-documentation-production
  variables:
    KANIKO_EXTRA_ARGS: "--build-arg GENERATED_DOCS_PATH=generated/html"
    CI_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
    REGISTRY_IMAGE_TAG: $CI_COMMIT_SHORT_SHA-$(date +%s)
    KUBERNETES_MEMORY_LIMIT: 4Gi
    KUBERNETES_MEMORY_REQUEST: 4Gi