File: .gitlab-ci.yml

Package: toil 9.1.0-1

image: quay.io/ucsc_cgl/toil_ci_prebake:latest
# Note that we must run in a privileged container for our internal Docker daemon to come up.
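# (On the runner side this means the Docker or Kubernetes executor has to be
# configured with privileged = true in the runner's config.toml, e.g. under
# [runners.docker]; nothing in this file can grant that by itself.)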

variables:
  PYTHONIOENCODING: "utf-8"
  DEBIAN_FRONTEND: "noninteractive"
  TOIL_OWNER_TAG: "shared"
  TOIL_HISTORY: "0"
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
  MAIN_PYTHON_PKG: "python3.13"
  # Used to tell pytest which tests to run by specifying markers.
  # Allows partitioning of tests to prevent duplicate running of tests in different jobs.
  # Currently specifies special tests that are not run by quick_test_offline.
  MARKER: "(tes or integrative or encryption or server_mode or fetchable_appliance or appliance or slow or docker or cwl or singularity or rsync3) and not kubernetes"
  TEST_THREADS: "4"
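  # As a rough sketch (the exact plumbing lives in the Makefile, so treat this
  # as an assumption rather than the literal command), the test jobs below end
  # up passing these through as something like:
  #   pytest -n "$TEST_THREADS" -m "$MARKER" <tests>
  # i.e. MARKER is a pytest marker expression and TEST_THREADS a parallel worker count.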
before_script:
  # Log where we are running, in case some Kubernetes hosts are busted. IPs are assigned per host.
  - ip addr
  # Configure Docker and Buildkit to use a mirror for Docker Hub and restart the daemon
  # Set the registry as insecure because it is probably cluster-internal over plain HTTP.
  - |
    if [[ ! -z "${DOCKER_HUB_MIRROR}" ]] ; then
        echo "{\"registry-mirrors\": [\"${DOCKER_HUB_MIRROR}\"], \"insecure-registries\": [\"${DOCKER_HUB_MIRROR##*://}\"]}" | sudo tee /etc/docker/daemon.json
        export SINGULARITY_DOCKER_HUB_MIRROR="${DOCKER_HUB_MIRROR}"
        echo "[registry.\"docker.io\"]" >buildkitd.toml
        echo "  mirrors = [\"${DOCKER_HUB_MIRROR##*://}\"]" >>buildkitd.toml
        echo "[registry.\"${DOCKER_HUB_MIRROR##*://}\"]" >>buildkitd.toml
        echo "  http = true" >>buildkitd.toml
    else
        echo "" >buildkitd.toml
    fi
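  # For illustration, with a hypothetical DOCKER_HUB_MIRROR of "http://mirror.example:5000"
  # the branch above would produce:
  #   /etc/docker/daemon.json -> {"registry-mirrors": ["http://mirror.example:5000"],
  #                               "insecure-registries": ["mirror.example:5000"]}
  #   buildkitd.toml          -> [registry."docker.io"]
  #                                mirrors = ["mirror.example:5000"]
  #                              [registry."mirror.example:5000"]
  #                                http = true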
  # Restart or start the Docker daemon
  - stopdocker || true
  - sudo rm -f /var/run/docker.sock
  - startdocker || true
  - docker info
  - cat /etc/hosts
  - mkdir -p ~/.kube && cp "$GITLAB_SECRET_FILE_KUBE_CONFIG" ~/.kube/config
  - mkdir -p ~/.aws && cp "$GITLAB_SECRET_FILE_AWS_CREDENTIALS" ~/.aws/credentials
  # We need to make sure docker buildx create can't see the ~/.kube/config that we deploy. It has
  # a service account bearer token for auth and triggers https://github.com/docker/buildx/issues/267
  # where buildx can't use a bearer token from a kube config and falls back to anonymous instead
  # of using the system's service account.
  - if [[ "${CI_BUILDKIT_DRIVER}" == "kubernetes" ]] ; then KUBECONFIG=/dev/null docker buildx create --use --name=buildkit --platform=linux/amd64,linux/arm64 --node=buildkit-amd64 --driver=kubernetes --driver-opt="nodeselector=kubernetes.io/arch=amd64" ; else cat buildkitd.toml ; docker buildx create --use --name=container-builder --driver=docker-container --config ./buildkitd.toml ; fi
  # Report on the builders, and make sure they exist.
  - docker buildx inspect --bootstrap || (echo "Docker builder deployment can't be found! Are we on the right Gitlab runner?" && exit 1)
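  # Prune the builder's build cache down to at most 80G (that is what
  # --keep-storage bounds) so the runner's disk doesn't fill up across jobs.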
  # This will hang if we can't talk to the builder
  - (echo "y" | docker buildx prune --keep-storage 80G) || true

after_script:
  # We need to clean up any files that Toil may have made via Docker that
  # aren't deletable by the Gitlab user. If we don't do this, Gitlab will try
  # to clean them up before running the next job on the runner, fail, and fail
  # that next job.
  - pwd
  - sudo rm -rf tmp
  - stopdocker || true

stages:
  - linting_and_dependencies
  - basic_tests
  - main_tests
  - integration

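# A note on the echo lines used throughout the job scripts below: pairs of
#   echo -e "\e[0Ksection_start:`date +%s`:NAME\r\e[0KDescription"
#   ...commands...
#   echo -e "\e[0Ksection_end:`date +%s`:NAME\r\e[0K"
# are GitLab's collapsible log sections; they only group the output between the
# two markers under "Description" in the job log and have no other effect.
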
lint:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: linting_and_dependencies
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
      - .mypy_cache
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all dependencies (except htcondor)"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && make prepare && make develop extras=[all]
    ${MAIN_PYTHON_PKG} -m pip freeze
    ${MAIN_PYTHON_PKG} --version
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:mypy\r\e[0KCheck the Python types with mypy"
    make mypy
    echo -e "\e[0Ksection_end:`date +%s`:mypy\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:docs\r\e[0KBuild the docs"
    make docs
    echo -e "\e[0Ksection_end:`date +%s`:docs\r\e[0K"
    check-jsonschema --schemafile https://json.schemastore.org/dependabot-2.0.json .github/dependabot.yml
    # make diff_pydocstyle_report

# We make sure to also lint with our oldest supported Python version on every PR.
py39_lint:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: linting_and_dependencies
  cache:
    key: cache-python3.9
    paths:
      - .cache/pip
      - .mypy_cache
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all dependencies (except htcondor)"
    python3.9 -m virtualenv venv && . venv/bin/activate && make prepare && make develop extras=[all]
    python3.9 -m pip freeze
    python3.9 --version
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:mypy\r\e[0KCheck the Python types with mypy"
    make mypy
    echo -e "\e[0Ksection_end:`date +%s`:mypy\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:docs\r\e[0KBuild the docs"
    make docs
    echo -e "\e[0Ksection_end:`date +%s`:docs\r\e[0K"
    check-jsonschema --schemafile https://json.schemastore.org/dependabot-2.0.json .github/dependabot.yml
    # make diff_pydocstyle_report

cwl_dependency_is_stand_alone:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: linting_and_dependencies
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:$(date +%s):prepare\r\e[0KDownload and install the CWL dependencies"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && make prepare && make develop extras=[cwl]
    echo -e "\e[0Ksection_end:$(date +%s):prepare\r\e[0K"
    echo -e "\e[0Ksection_start:$(date +%s):test\r\e[0KRun a single CWL test: test_run_revsort"
    make test threads="1" tests=src/toil/test/cwl/cwlTest.py::TestCWLWorkflow::test_run_revsort
    echo -e "\e[0Ksection_end:$(date +%s):test\r\e[0K"

wdl_dependency_is_stand_alone:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: linting_and_dependencies
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install the WDL dependencies"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && make prepare && make develop extras=[wdl]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun a single WDL test: test_run_MD5sum"
    make test threads="1" tests=src/toil/test/wdl/wdltoil_test.py::TestWDL::test_MD5sum
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

quick_test_offline:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: basic_tests
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    ${MAIN_PYTHON_PKG} -m virtualenv venv
    . venv/bin/activate
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install the aws, google, and wdl dependencies"
    pip install -U pip wheel
    make prepare
    make develop extras=[aws,google,wdl]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the offline tests"
    # Make sure not to run the doctests here because we didn't install all possible extras
    TOIL_TEST_QUICK=True make test_offline tests=src/toil/test threads="${TEST_THREADS}"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

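# For each supported Python version there is a pair of jobs below: a
# pyXY_appliance_build job that logs into the registry (setup_gitlab_docker.py
# reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS) and builds and pushes the Toil
# appliance Docker images with make push_docker, and a pyXY_main job that runs
# the doctests plus the src/utils/server/lib test batteries. Apart from the
# py313 pair, these jobs only run on schedules, tags, the default branch,
# *-fix-ci branches, or branches whose names mention that Python version.
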
py39_appliance_build:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.9.*/
  stage: basic_tests
  cache:
    key: cache-python3.9
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.9 -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && pip install -U build && make prepare && pip install pycparser && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    # This reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS
    echo -e "\e[0Ksection_start:`date +%s`:docker\r\e[0KBuild a source distribution and then build the docker containers"
    python setup_gitlab_docker.py
    make push_docker
    echo -e "\e[0Ksection_end:`date +%s`:docker\r\e[0K"

py39_main:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.9.*/
  stage: basic_tests
  cache:
    key: cache-python3.9
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.9 -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the tests"
    make doctest threads="${TEST_THREADS}"
    make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils src/toil/test/server"
    TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests="src/toil/test/lib"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

py310_appliance_build:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.10.*/
  stage: basic_tests
  cache:
    key: cache-python3.10
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.10 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10 &&  pip install -U pip wheel && pip install -U build && make prepare && pip install pycparser && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    # This reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS
    echo -e "\e[0Ksection_start:`date +%s`:docker\r\e[0KBuild a source distribution and then build the docker containers"
    python setup_gitlab_docker.py
    make push_docker
    echo -e "\e[0Ksection_end:`date +%s`:docker\r\e[0K"

py310_main:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.10.*/
  stage: basic_tests
  cache:
    key: cache-python3.10
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.10 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10 && pip install -U pip wheel && make prepare && pip install pycparser && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the tests"
    make doctest threads="${TEST_THREADS}"
    make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils src/toil/test/server"
    TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests="src/toil/test/lib"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

py311_appliance_build:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.11.*/
  stage: basic_tests
  cache:
    key: cache-python3.11
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.11 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11 &&  pip install -U pip wheel && pip install -U build && make prepare && pip install pycparser && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    # This reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS
    echo -e "\e[0Ksection_start:`date +%s`:docker\r\e[0KBuild a source distribution and then build the docker containers"
    python setup_gitlab_docker.py
    make push_docker
    echo -e "\e[0Ksection_end:`date +%s`:docker\r\e[0K"

py311_main:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.11.*/
  stage: basic_tests
  cache:
    key: cache-python3.11
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.11 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11 && pip install -U pip wheel && make prepare && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the tests"
    make doctest threads="${TEST_THREADS}"
    make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils src/toil/test/server"
    TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests="src/toil/test/lib"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

py312_appliance_build:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.12.*/
  stage: basic_tests
  cache:
    key: cache-python3.12
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.12 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12 &&  pip install -U pip wheel && pip install -U build && make prepare && pip install pycparser && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    # This reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS
    echo -e "\e[0Ksection_start:`date +%s`:docker\r\e[0KBuild a source distribution and then build the docker containers"
    python setup_gitlab_docker.py
    make push_docker
    echo -e "\e[0Ksection_end:`date +%s`:docker\r\e[0K"

py312_main:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH =~ /.*3\.12.*/
  stage: basic_tests
  cache:
    key: cache-python3.12
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.12 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12 && pip install -U pip wheel && make prepare && make develop extras=[all,htcondor]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the tests"
    make doctest threads="${TEST_THREADS}"
    make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils src/toil/test/server"
    TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests="src/toil/test/lib"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

py313_appliance_build:
  stage: basic_tests
  cache:
    key: cache-python3.13
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, including htcondor"
    python3.13 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.13 &&  pip install -U pip wheel && pip install -U build && make prepare && pip install pycparser && make develop extras=[all]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    # This reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS
    echo -e "\e[0Ksection_start:`date +%s`:docker\r\e[0KBuild a source distribution and then build the docker containers"
    python setup_gitlab_docker.py
    make push_docker
    echo -e "\e[0Ksection_end:`date +%s`:docker\r\e[0K"

py313_main:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: basic_tests
  cache:
    key: cache-python3.13
    paths:
      - .cache/pip
  script: |
    pwd
    # TODO: htcondor is not out for Python 3.13 yet. We don't actively test the htcondor batch system, but we should still test that htcondor installs.
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install all the dependencies, except htcondor"
    python3.13 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.13 && pip install -U pip wheel && make prepare && make develop extras=[all]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the tests"
    make doctest threads="${TEST_THREADS}"
    make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils src/toil/test/server"
    TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests="src/toil/test/lib"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

slurm_test:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
  stage: integration
  script:
    - pwd
    - cd contrib/slurm-test/
    - docker compose version
    - ./slurm_test.sh

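# cwl_v1.2 (and several of the jobs further down) also runs on ordinary branch
# pipelines when the branch's diff against refs/heads/master touches the listed
# paths; that is what the rules:changes:compare_to clause expresses.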
cwl_v1.2:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH
      changes:
        compare_to: 'refs/heads/master'
        paths:
          - 'src/toil/cwl/*'
          - 'src/toil/test/cwl/*'
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install cwl & aws dependencies"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[cwl,aws]
    python setup_gitlab_docker.py  # login to increase the docker.io rate limit
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    # Run CWL integration tests excluded from cwl_misc
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the tests excluded from cwl_misc"
    time make test threads="${TEST_THREADS}" tests="src/toil/test/cwl/cwlTest.py -k 'integrative and not conformance'"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"
  artifacts:
    reports:
      junit: "*.junit.xml"
    paths:
    - "*.junit.xml"
    when: always
    expire_in: 14 days

cwl_badge:
  rules:
    - if: $CI_COMMIT_TAG
      when: never
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH
      changes:
        compare_to: 'refs/heads/master'
        paths:
          - 'src/toil/cwl/*'
          - 'src/toil/test/cwl/*'
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install cwl & aws dependencies"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[cwl,aws]
    python setup_gitlab_docker.py  # login to increase the docker.io rate limit
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:download_tests\r\e[0KDownload the CWL conformance tests"
    make download_cwl_spec
    rm -rf badges1.2
    echo -e "\e[0Ksection_end:`date +%s`:download_tests\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the CWL v1.2 conformance tests."
    make test tests="src/toil/test/cwl/spec_v12/conformance_tests.cwltest.yaml" threads="${TEST_THREADS}" pytest_args="--randomly-dont-reorganize --cwl-runner-verbose --cwl-badgedir=badges1.2 --junit-xml=in-place-update-conformance-1.2.junit.xml --cwl-args='--relax-path-checks --clean=always --logDebug --statusWait=10 --retryCount=2 --eval-timeout=600 --bypass-file-store'"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"
  allow_failure: true
  artifacts:
    paths:
    - "badges1.2"
    - "*.junit.xml"
    reports:
      junit: "*.junit.xml"
    when: always
    expire_in: 365 days

cwl_badge_release:
  # For releases, keep the conformance badges indefinitely, and don't keep the JUnit files.
  # Everything else is the same as cwl_badge.
  rules:
    - if: $CI_COMMIT_TAG
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install cwl & aws dependencies"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[cwl,aws]
    python setup_gitlab_docker.py  # login to increase the docker.io rate limit
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:download_tests\r\e[0KDownload the CWL conformance tests"
    make download_cwl_spec
    rm -rf badges1.2
    echo -e "\e[0Ksection_end:`date +%s`:download_tests\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the CWL v1.2 conformance tests."
    make test tests="src/toil/test/cwl/spec_v12/conformance_tests.cwltest.yaml" threads="${TEST_THREADS}" pytest_args="--randomly-dont-reorganize --cwl-runner-verbose --cwl-badgedir=badges1.2 --junit-xml=in-place-update-conformance-1.2.junit.xml --cwl-args='--relax-path-checks --clean=always --logDebug --statusWait=10 --retryCount=2 --eval-timeout=600 --bypass-file-store'"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"
  allow_failure: true
  artifacts:
    paths:
    - "badges1.2"
    when: always
    expire_in: never

cwl_on_arm:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH
      changes:
        compare_to: 'refs/heads/master'
        paths:
          - 'src/toil/cwl/*'
  stage: integration
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[cwl,aws]
    - python setup_gitlab_docker.py  # login to increase the docker.io rate limit
    # This reads GITLAB_SECRET_FILE_SSH_KEYS
    - python setup_gitlab_ssh.py
    - chmod 400 /root/.ssh/id_rsa
    # Run CWL conformance tests, on an ARM cluster on AWS, using the file store
    - make test threads="${TEST_THREADS}" tests=src/toil/test/provisioners/clusterTest.py::CWLOnARMTest
  artifacts:
    reports:
      junit: "*.junit.xml"
    paths:
    - "*.junit.xml"
    when: always
    expire_in: 14 days

cwl_misc:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
  stage: main_tests
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install cwl & aws dependencies"
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[cwl,aws]
    python setup_gitlab_docker.py  # login to increase the docker.io rate limit
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the miscellaneous CWL tests"
    make test threads="${TEST_THREADS}" tests="src/toil/test/cwl/cwlTest.py -k 'not integrative and not conformance'"
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

#cwl_v1.2_kubernetes:
#  stage: main_tests
#  script:
#    - pwd
#    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[cwl,aws,kubernetes]
#    - export TOIL_KUBERNETES_OWNER=toiltest
#    - export TOIL_AWS_SECRET_NAME=shared-s3-credentials
#    - export TOIL_KUBERNETES_HOST_PATH=/data/scratch
#    - export TOIL_WORKDIR=/var/lib/toil
#    - export SINGULARITY_CACHEDIR=/var/lib/toil/singularity-cache
#    - if [[ ! -z "${KUBERNETES_DOCKER_HUB_MIRROR}" ]] ; then export SINGULARITY_DOCKER_HUB_MIRROR="${KUBERNETES_DOCKER_HUB_MIRROR}" ; fi
#    - mkdir -p ${TOIL_WORKDIR}
#    - make test threads="${TEST_THREADS}" tests="src/toil/test/cwl/cwlTest.py::TestCWLv12::test_kubernetes_cwl_conformance src/toil/test/cwl/cwlTest.py::TestCWLv12::test_kubernetes_cwl_conformance_with_caching"
#  artifacts:
#    reports:
#      junit: "*.junit.xml"
#    paths:
#      - "*.junit.xml"
#    when: always
#    expire_in: 14 days

wdl:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: main_tests
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script: |
    pwd
    echo -e "\e[0Ksection_start:`date +%s`:prepare\r\e[0KDownload and install default-jew, and all the Python dependencies except htcondor"
    apt update && apt install -y default-jre
    ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    echo -e "\e[0Ksection_end:`date +%s`:prepare\r\e[0K"
    echo -e "\e[0Ksection_start:`date +%s`:test\r\e[0KRun the WDL tests"
    make test threads="${TEST_THREADS}" marker="${MARKER}" tests=src/toil/test/wdl/
    echo -e "\e[0Ksection_end:`date +%s`:test\r\e[0K"

jobstore:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: main_tests
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - make test threads="${TEST_THREADS}" marker="${MARKER}" tests="src/toil/test/jobStores/jobStoreTest.py src/toil/test/sort/sortTest.py"

provisioner:
  rules:
    - if: $CI_PIPELINE_SOURCE != "schedule"
  stage: main_tests
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - make test threads="${TEST_THREADS}" marker="${MARKER}" tests="src/toil/test/lib/aws/ src/toil/test/provisioners/aws/awsProvisionerTest.py src/toil/test/provisioners/clusterScalerTest.py"

# https://ucsc-ci.com/databiosphere/toil/-/jobs/38672
# Guessing decorators are masking the class as a function in the job above? Also, the abstract class is run as a normal test; it should be hidden.

jobstore_integration:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - export TOIL_TEST_INTEGRATIVE=True
    - export TOIL_AWS_KEYNAME=id_rsa
    - export TOIL_AWS_ZONE=us-west-2a
    # This reads GITLAB_SECRET_FILE_SSH_KEYS
    - python setup_gitlab_ssh.py
    - chmod 400 /root/.ssh/id_rsa
    - make test threads="${TEST_THREADS}" tests="src/toil/test/jobStores/jobStoreTest.py"

server_integration:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH
      changes:
        compare_to: 'refs/heads/master'
        paths:
          - 'src/toil/server/*'
          - 'src/toil/test/server/*'
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - export TOIL_TEST_INTEGRATIVE=True
    - export TOIL_AWS_KEYNAME=id_rsa
    - export TOIL_AWS_ZONE=us-west-2a
    # This reads GITLAB_SECRET_FILE_SSH_KEYS
    - python setup_gitlab_ssh.py
    - chmod 400 /root/.ssh/id_rsa
    # Test server and its integration with AWS
    - make test threads="${TEST_THREADS}" tests="src/toil/test/server -k 'integrative'"

provisioner_integration:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - python setup_gitlab_ssh.py && chmod 400 /root/.ssh/id_rsa
    - echo $'Host *\n    AddressFamily inet' > /root/.ssh/config
    - export LIBPROCESS_IP=127.0.0.1
    - python setup_gitlab_docker.py
    - export TOIL_TEST_INTEGRATIVE=True; export TOIL_AWS_KEYNAME=id_rsa; export TOIL_AWS_ZONE=us-west-2a
    # This reads GITLAB_SECRET_FILE_SSH_KEYS
    - python setup_gitlab_ssh.py
    - make test threads="${TEST_THREADS}" tests="src/toil/test/sort/sortTest.py src/toil/test/provisioners/clusterScalerTest.py src/toil/test/utils/utilsTest.py::TestUtils::testAWSProvisionerUtils src/toil/test/provisioners/aws/awsProvisionerTest.py::TestAWSProvisionerBenchTest src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSAutoscaleTest src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSManagedAutoscaleTest src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSStaticAutoscaleTest src/toil/test/wdl/wdltoil_test_kubernetes.py::WDLKubernetesClusterTest"
#    - make test tests=src/toil/test/provisioners/gceProvisionerTest.py  # needs env vars set to run

google_jobstore:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - pwd
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - python setup_gitlab_ssh.py && chmod 400 /root/.ssh/id_rsa
    - echo $'Host *\n    AddressFamily inet' > /root/.ssh/config
    - export LIBPROCESS_IP=127.0.0.1
    - export TOIL_TEST_INTEGRATIVE=True
    - export GOOGLE_APPLICATION_CREDENTIALS=$GOOGLE_CREDENTIALS
    - export TOIL_GOOGLE_KEYNAME=id_rsa
    - export TOIL_GOOGLE_PROJECTID=toil-dev
    - make test threads="${TEST_THREADS}" tests=src/toil/test/jobStores/jobStoreTest.py::GoogleJobStoreTest

mesos:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH
      changes:
        compare_to: 'refs/heads/master'
        paths:
          - 'src/toil/test/mesos/*'
          - 'src/toil/batchSystems/mesos/*'
  stage: integration
  cache:
    key: cache-python3.10
    paths:
      - .cache/pip
  script:
    - pwd
    - python3.10 -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[mesos,google,aws]
    - python setup_gitlab_ssh.py && chmod 400 /root/.ssh/id_rsa
    - echo $'Host *\n    AddressFamily inet' > /root/.ssh/config
    - export LIBPROCESS_IP=127.0.0.1
    - export TOIL_TEST_INTEGRATIVE=True
    - export TOIL_AWS_KEYNAME=id_rsa
    - export TOIL_AWS_ZONE=us-west-2a
    - export GOOGLE_APPLICATION_CREDENTIALS=$GOOGLE_CREDENTIALS
    - export TOIL_GOOGLE_KEYNAME=id_rsa
    - export TOIL_GOOGLE_PROJECTID=toil-dev
    - make test threads="${TEST_THREADS}" tests="src/toil/test/mesos/MesosDataStructuresTest.py::DataStructuresTest src/toil/test/batchSystems/batchSystemTest.py::MesosBatchSystemTest src/toil/test/sort/sortTest.py::SortTest::testAwsMesos src/toil/test/sort/sortTest.py::SortTest::testFileMesos src/toil/test/sort/sortTest.py::SortTest::testGoogleMesos src/toil/test/cwl/cwlTest.py::TestCWLv10Conformance::test_mesos_cwl_conformance src/toil/test/cwl/cwlTest.py::TestCWLv10Conformance::test_mesos_cwl_conformance_with_caching src/toil/test/src/promisedRequirementTest.py::TestMesosPromisedRequirements src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSAutoscaleTest src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSStaticAutoscaleTest src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSAutoscaleTestMultipleNodeTypes src/toil/test/provisioners/aws/awsProvisionerTest.py::AWSRestartTest::testAutoScaledCluster"

batchsystem:
  rules:
      - if: $CI_PIPELINE_SOURCE == "schedule"
      - if: $CI_COMMIT_TAG
      - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
      - if: $CI_COMMIT_BRANCH
        changes:
          compare_to: 'refs/heads/master'
          paths:
            - 'src/toil/test/batchSystems/test_gridengine.py'
            - 'src/toil/batchSystems/gridengine.py'
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && pip install -U pip wheel && make prepare && make develop extras=[all]
    - make test threads="${TEST_THREADS}" tests=src/toil/test/batchSystems/test_gridengine.py::GridEngineTest

# Cactus-on-Kubernetes integration (as a script and not a pytest test)
cactus_integration:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
    - if: $CI_COMMIT_BRANCH
      changes:
        compare_to: 'refs/heads/master'
        paths:
          - 'src/toil/test/cactus/test_cactus_integration.py'
  stage: integration
  cache:
    key: cache-$MAIN_PYTHON_PKG
    paths:
      - .cache/pip
  script:
    - export CACTUS_COMMIT_SHA=03295e9af99e2e9168ccd02e78a9f4c0c8dcd490
    - set -e
    - ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && make prepare && make develop extras=[aws]
    - python setup_gitlab_docker.py  # login to increase the docker.io rate limit
    # This reads GITLAB_SECRET_FILE_SSH_KEYS
    - python setup_gitlab_ssh.py
    - chmod 400 /root/.ssh/id_rsa
    - make test threads="${TEST_THREADS}" tests=src/toil/test/cactus/test_cactus_integration.py