Compare commits

...

289 Commits

Author SHA1 Message Date
Mirco f4da27f63f Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2 weeks ago
Mirco e764534487 sequence chart retsic-management.clj 2 weeks ago
bom 6093d160e8 bump version to: 4.12.1-dev 3 weeks ago
bom 4d8dc95d8e release: 4.12.0 3 weeks ago
bom e6f39eab21 Add support for hetzner csi 3 weeks ago
Mirco 0cb4bc43f9 store state passwords in map & edn 2 months ago
Mirco 5b5bc0ab96 install restic_management.clj to image 2 months ago
Mirco b139865f89 bump version to: 4.11.11-dev 2 months ago
Mirco e4fa06fc42 release: 4.11.10 2 months ago
Mirco 4fa849b72b check with version 2 months ago
Mirco 48bbbe6f6e refactoring clj-cljs image install.sh 2 months ago
Mirco bf843edb80 refactoring image install.sh checksum function graalvm kubeconform 2 months ago
Mirco 5e8c21c521 dev-notes first steps 2 months ago
Mirco 3bc3a0cd7e Statemachine credRot devnotes 2 months ago
Michael Jerger 56bc215f26 read / write state 2 months ago
Michael Jerger 678b75ae6f ignore 2 months ago
Mirco d133b281f9 refactoring installing ba*bash*ka 2 months ago
Mirco 581449fba4 Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2 months ago
Mirco b38876d9ef ba*bash*ka binary install to image 2 months ago
bom 90c4d4ec9d bump version to: 4.11.10-dev 3 months ago
bom c0daa85612 release: 4.11.9 3 months ago
bom 0bdd13cf8a Add restic to base backup image 3 months ago
bom 1bba35963a bump version to: 4.11.9-dev 3 months ago
bom 2b7fe54f76 release: 4.11.8 3 months ago
Michael Jerger e96581754c move backup image to devops-build 3 months ago
bom 4034b0022b bump version to: 4.11.8-dev 3 months ago
bom 87cc56cdd7 release: 4.11.7 3 months ago
bom a9d98a6d0c Use returncode to check if process failed 3 months ago
bom 671a3b8cbb bump version to: 4.11.7-dev 3 months ago
bom 011fc848af release: 4.11.6 3 months ago
bom c5af5c9198 Raise error if process fails in execute_live 3 months ago
bom afec1fdd0c bump version to: 4.11.6-dev 3 months ago
bom 6513e00e54 release: 4.11.5 3 months ago
bom 3cc6206d06 Extend execute_live to handle input
use the new functionality for docker drun
3 months ago
Michael Jerger 48452baac5 bump version to: 4.11.5-dev 4 months ago
Michael Jerger 5875642777 release: 4.11.4 4 months ago
Michael Jerger 58d8d46a0c initialize jvm build tools 4 months ago
Michael Jerger 6e57204ce5 bump version to: 4.11.4-dev 4 months ago
Michael Jerger d3f1204932 release: 4.11.3 4 months ago
Michael Jerger 305c9a6bd0 there is no jdk21 4 months ago
Michael Jerger 751bc26a21 bump version to: 4.11.3-dev 4 months ago
Michael Jerger 202c07150c release: 4.11.2 4 months ago
Michael Jerger f5c75a31f5 use 4.10.7 4 months ago
Michael Jerger 7c24477348 java 21 4 months ago
Michael Jerger 237691f79f bump version to: 4.11.2-dev 4 months ago
Michael Jerger c6c8de9b0f release: 4.11.1 4 months ago
Michael Jerger a7a00756bc fix the bootstrap 4 months ago
Michael Jerger e822e3d0f0 bump version to: 4.11.1-dev 4 months ago
Michael Jerger bbd12cda4e release: 4.11.0 4 months ago
Michael Jerger adcf93d321 fix java home 4 months ago
Michael Jerger c9df6082ae Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 4 months ago
Michael Jerger 5825bcbf47 update graal version 4 months ago
ansgarz b8f7b72a61 chg README.md 4 months ago
ansgarz f4d30706b7 fix typo 4 months ago
ansgarz 72cd6a65d2 add link to example project to README.md 4 months ago
ansgarz fde5061bf3 update docs 5 months ago
Michael Jerger 0dcb375e15 bump version to: 4.10.9-dev 5 months ago
Michael Jerger c6872458e2 release: 4.10.8 5 months ago
Mirco a5a653b213 [skip-ci] infrastructure/../test folder removed 5 months ago
Mirco f12d43b9bc bump version to: 4.10.8-dev 5 months ago
Mirco 5f18c7ddb3 release: 4.10.7 5 months ago
Mirco c92923f6b9 [skip-ci] fix 5 months ago
Mirco 2aec5b44d2 fix:: ignore missing image/test folder 5 months ago
Mirco 52622b1f2d bump version to: 4.10.7-dev 5 months ago
Mirco 822bebc26f release: 4.10.6 5 months ago
Mirco 0ba623c560 ignore missing image/test folder 5 months ago
Mirco 147cb1bd72 bump version to: 4.10.6-dev 5 months ago
Mirco 56dc19322a release: 4.10.5 5 months ago
Mirco ee20925e04 gitlabci.yml use dind/python 4.10.5 5 months ago
Mirco 2455ea0ff0 bump version to: 4.10.5-dev 5 months ago
Mirco 739940cdd3 release: 4.10.4 5 months ago
Mirco 47dc81860a bump version to: 4.10.4-dev 6 months ago
Mirco 308c86595c release: 4.10.3 6 months ago
Mirco 45745f3ebd fix gitlab-ci.yml version 6 months ago
Mirco 5b5249eb55 usage of install_functions_debian/alpine.sh 6 months ago
Mirco d2a3e60edf bump version to: 4.10.3-dev 6 months ago
Mirco 8324727b8a release: 4.10.2 6 months ago
Mirco c79c0c1a7f bump version to: 4.10.2-dev 6 months ago
Mirco d5c9bae212 release: 4.10.1 6 months ago
Mirco 2914be8a88 better structuring to docker image building 6 months ago
Mirco bfc55293dc Adapt usage ddadevops-dind/python version 6 months ago
Mirco 9a0573bc33 bump version to: 4.10.1-dev 6 months ago
Mirco 701047fd4f release: 4.10.0 6 months ago
Mirco 9638450ef6 Improvements docker image building 6 months ago
Mirco d90f68ce1a Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 6 months ago
Mirco f3f51b165a use execution_api for self checking docker image 6 months ago
bom 29d439e82a bump version to: 4.9.4-dev 6 months ago
bom 4d8961557c release: 4.9.3 6 months ago
bom 3e49e31e66 Add kotlin image to ci 6 months ago
bom 5053b79ad4 Make version regex for python and gradle more concrete
Check that "version" is the start of the string to avoid changing cases like
"kotlin_version = ..."
6 months ago
bom 215a9bf0fe Add regression test for malformed version in clj 7 months ago
bom 97978e9950 bump version to: 4.9.3-dev 7 months ago
bom 7375ba16cc release: 4.9.2 7 months ago
bom 07cf837ac6 Simplify set/get version functions 7 months ago
bom 6399a1dfeb Update regexes for gradle and python files 7 months ago
bom 36df9f486b Cleanup
Fix typos, remove old todos
7 months ago
bom 617d909438 Replace clojure version regex 7 months ago
bom bcccfd8b9c Move regex string to a function
Avoids having duplicates in similar functions
7 months ago
bom f7897a574d bump version to: 4.9.2-dev 7 months ago
bom c6217bd0a2 release: 4.9.1 7 months ago
bom e5d1203435 Update version suffix to fit build file type
Resolves a bug where when using "build.py" as the primary build file
and some other secondary file like "build.gradle",
would result in the ".gradle" file version having a "-dev" suffix
 instead of "-SNAPSHOT"

Includes regression test
7 months ago
bom 78824ea38b Extend GitApiMock with push_follow_tags 7 months ago
Michael Jerger 288247be8e bump version to: 4.9.1-dev 7 months ago
Michael Jerger bc06c34ea3 release: 4.9.0 7 months ago
Michael Jerger 03c4229cf0 Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 7 months ago
Michael Jerger 32ae4f2a6f bump version to: 4.8.1-dev 7 months ago
Michael Jerger 3bbae609d7 release: 4.8.0 7 months ago
jem b8f6129146 Merge pull request 'kotlin-compile' (#1) from kotlin-compile into main
Reviewed-on: #1
7 months ago
Michael Jerger ef2efc40f7 updates for kotlin build 7 months ago
Michael Jerger b3a612c938 Merge branch 'main' into kotlin-compile 7 months ago
Clemens edd2ae5743 bump version to: 4.7.5-dev 8 months ago
Clemens fc92260a43 release: 4.7.4 8 months ago
Michael Jerger 7e5e66d933 fix doc 8 months ago
Michael Jerger 9fdd81d4b0 bump version to: 4.7.4-dev 8 months ago
Michael Jerger 5d81903870 release: 4.7.3 8 months ago
Michael Jerger d643bba325 add some cleanup 8 months ago
Michael Jerger 4c0524aafe bump version to: 4.7.3-dev 8 months ago
Michael Jerger 1db263d13f release: 4.7.2 8 months ago
Michael Jerger 58bfd98af9 bump version to: 4.7.2-dev 8 months ago
Michael Jerger d3e8c19f02 release: 4.7.1 8 months ago
Michael Jerger bdca4f224f add new function to doc 10 months ago
Michael Jerger 331c57a952 gitlab ci is no longer used 10 months ago
Michael Jerger cea54b0945 added doc for creating artifacts 10 months ago
Michael Jerger d8396402b5 bootstrap with dev no longer needed 10 months ago
Michael Jerger 1afa34dba3 required for releasing 10 months ago
Michael Jerger a5923aef5f use released build-container 10 months ago
Michael Jerger c669d8f7b5 bump version to: 4.7.1-dev 10 months ago
Michael Jerger 22397a369e release: 4.7.0 10 months ago
Michael Jerger d5c711fa88 use current build container 10 months ago
Michael Jerger 63a043dfee bump version to: 4.6.1-dev 10 months ago
Michael Jerger 3ccac186c9 release: 4.6.0 10 months ago
Michael Jerger 686c703f7d minor cleanup 10 months ago
Michael Jerger 17d93c34ec adjust for buildtest 10 months ago
Michael Jerger 95ca351ae8 bump version to: 4.5.5-dev 10 months ago
Michael Jerger d15195a8e0 release: 4.5.4 10 months ago
Michael Jerger b3841a6801 bump version to: 4.5.4-dev 10 months ago
Michael Jerger 3d83ec19e1 release: 4.5.3 10 months ago
Michael Jerger e7000ec408 Handover the corect token? 10 months ago
Michael Jerger bf74fd1a73 bump version to: 4.5.3-dev 10 months ago
Michael Jerger 954a2f735b release: 4.5.2 10 months ago
Michael Jerger f8ec35860a add more debugging info 10 months ago
Michael Jerger d67dfb7378 bump version to: 4.5.2-dev 10 months ago
Michael Jerger 6dc10b77e7 release: 4.5.1 10 months ago
Michael Jerger b77a8fd67d add curl to py build image 10 months ago
Michael Jerger 951e66776d bump version to: 4.5.1-dev 10 months ago
Michael Jerger 1a946151b7 release: 4.5.0 10 months ago
Michael Jerger 8132958b16 preparing bugfix release 10 months ago
Michael Jerger fea084c09a fix escaping 10 months ago
Michael Jerger ad6287aafe bump version to: 4.4.1-dev 10 months ago
Michael Jerger 509216c001 release: 4.4.0 10 months ago
Michael Jerger 62e3f58f81 Merge branch 'artifact-mixin' into 'main'
Release creation on forgejo targets

See merge request domaindrivenarchitecture/dda-devops-build!18
10 months ago
Michael Jerger 898b8e8900 use new version 10 months ago
Michael Jerger 17ae55f3c0 fix drun & improve env rekognition 10 months ago
Michael Jerger ec0844d53b use new ddadevops 10 months ago
Michael Jerger b6b2e6d9b5 add env for all jobs 10 months ago
erik 46b8172f11 Use v4.3.1 10 months ago
erik d160c551e1 Use v4.3.0 10 months ago
erik 1f2e1d9569 Use v4.2.0 10 months ago
erik 66691c9ee9 Add task publish artifacts 10 months ago
erik dce22ba7b3 Remove publish_artifact task for test purpose 10 months ago
erik 25887841a2 Add missing var def 10 months ago
erik 57b88664d2 Rename vars 10 months ago
erik ddb173af46 Change attachment type to path 10 months ago
erik a9d01c5907 Assert that token is a string 10 months ago
erik 46c8a1751c Rename var 10 months ago
erik dea2d7f3ed Ignore line too long 10 months ago
Michael Jerger 06a650fd77 use new version for ci 10 months ago
Michael Jerger 1fff46986e create an empty release 10 months ago
Michael Jerger ab8bb7f400 allow empty artifacts 10 months ago
Michael Jerger 2be0a44aa8 add js 10 months ago
Michael Jerger 7d2d197cbb fix some linting 10 months ago
Michael Jerger 2e24e79a4c introduce artifact class 10 months ago
Michael Jerger d555b34eef add artifact to domain 10 months ago
Michael Jerger f9daf87262 remove no longer neede artifact deployment 10 months ago
Michael Jerger 7aa45910e9 finish register artifacts 10 months ago
Michael Jerger 45884d1032 wip: prepare ataching artifacts - fix the test 10 months ago
Michael Jerger bfcdcbb78a wip: prepare ataching artifacts 10 months ago
Michael Jerger caaf804fd3 add release-endpoint-calculation to domain object 10 months ago
Michael Jerger a876fdc799 add error handling for release_id parsing 10 months ago
Michael Jerger e0150e6fcc parse the release_id from create response 10 months ago
Michael Jerger b861087e9d fix test no longer needs gopass 10 months ago
Michael Jerger 700a0a2f4f add missing informations for creating release 10 months ago
Michael Jerger ee58151a8d initialize default credentials 10 months ago
Michael Jerger f30609288a add token & improve artifact validation 10 months ago
Michael Jerger 2217e5c8d1 add token & improve artifact validation 10 months ago
Michael Jerger 3cfb453454 improve some linting 10 months ago
Michael Jerger 5d2596bd3f improve some linting 10 months ago
Michael Jerger cb41ad0719 update domain doc 10 months ago
erik db8b41be19 Add comments for moving functionality 10 months ago
erik f4be6e0c8b Implement calculation of forgejo release api endpoint url 10 months ago
Michael Jerger 5f5743354c wip: calculate forgejo_release_api_endpoint 10 months ago
erik 2469fd2f5b [Skip-CI] Rename function 10 months ago
erik 39591c8aa9 [Skip-CI] WIP Implement release publishing 10 months ago
erik 337a790044 Implement checksum calculation 10 months ago
erik 935baa9932 Simpler sha calc command 10 months ago
erik 369f62ff38 Correct var names 10 months ago
Michael Jerger 074e77196e wip calculate-sha [skip-ci] 10 months ago
Michael Jerger 88253f49ac use new api in release-service 10 months ago
Michael Jerger 2fc59f105b artifact publish ist part of release now 10 months ago
Michael Jerger 00202edecc add kotlin image 10 months ago
Michael Jerger 31ac285df0 add kotlin image 10 months ago
erik 0952ec57a8 [Skip-CI] Add initial artifact deployment domain object 10 months ago
erik 911158f882 Remove __get_base_artifact_release_url() 10 months ago
bom f3bf8cb335 Add skeleton for ArtifactDeploymentService 10 months ago
bom c02440ac65 Return values of post requests 10 months ago
bom 8d4921ea70 Switch order of function parameters 10 months ago
bom 09bf3f5d44 Switch some quotes and brackets 10 months ago
erik 6dbbb8f2a1 [Skip-CI] Add ArtifactDeploymentApi 10 months ago
bom e6450b796f Add basic artifact service 10 months ago
bom c15503b7a0 Update artifact deployment mixin 10 months ago
erik 2e5e1ea0ee [Skip-Ci] Update process description 10 months ago
Michael Jerger 483c2b8bba bump version to: 4.3.2-dev 10 months ago
Michael Jerger 2c2a88eced release: 4.3.1 10 months ago
Michael Jerger 577c717a87 fix backend init 10 months ago
Michael Jerger 1585f15582 fix linting 10 months ago
erik 9517e9081e [Skip-CI] Collect and sort functional reqs 10 months ago
erik 355bd477af Add initial artifact_deployment_mixin.py 10 months ago
erik 48f7d9c29e [Skip-CI] Add Development and mirrors section 10 months ago
Michael Jerger 3cbf18792b bump version to: 4.3.1-dev 11 months ago
Michael Jerger e38acb39c1 release: 4.3.0 11 months ago
Michael Jerger 03e32418e8 graalvm only for clj 11 months ago
Michael Jerger 1cc5c565f3 mv to new name 11 months ago
Michael Jerger 5a39ac0970 we will use a dedicated image for graalvm 11 months ago
Michael Jerger 777dfff541 bump version to: 4.2.1-dev 11 months ago
Michael Jerger 074e62d16e release: 4.2.0 11 months ago
Michael Jerger 3ccfa81155 remove deprecated image 11 months ago
Michael Jerger 9ffbba0765 add pyb to clojure image 11 months ago
Michael Jerger 4efddcc104 fix the link ? 11 months ago
Michael Jerger b7540151fc fix the link ? 11 months ago
Michael Jerger ca88c63407 doc update 11 months ago
bom 41c41cf9c3 Fix execute_secure call 11 months ago
bom a53c28b690 Use execute_secure in dockerhub_login 11 months ago
bom 8ae03f5811 Implement execute_secure
Allows us to sanitize commands involving sensitive information like
passwords, before throwing exceptions
11 months ago
bom 2ead8cc31b Handle failed execution in execute_live 11 months ago
erik bf2d68bddc Linting 11 months ago
erik e21155fdec Add f-string 11 months ago
erik 43988291c6 Handle errors in execute 11 months ago
Michael Jerger f04477f137 use new images 11 months ago
Michael Jerger 587c8893f0 bump version to: 4.1.1-dev 11 months ago
Michael Jerger aea4a31445 release: 4.1.0 11 months ago
Michael Jerger 7002683a84 Merge branch 'feature/improve-image-build' into 'main'
improve image names

See merge request domaindrivenarchitecture/dda-devops-build!17
11 months ago
Michael Jerger f2fd1e3b07 Merge branch 'main' into feature/improve-image-build 11 months ago
Michael Jerger 0a40e5e213 make release more simple 11 months ago
Michael Jerger 146e11a1de update doc 11 months ago
Michael Jerger a15030adaa fix ci 11 months ago
Michael Jerger 3c9a86f67c build all images 11 months ago
Michael Jerger cb48674864 fix build 11 months ago
Michael Jerger 771ffe5229 mv domain logic to domain 11 months ago
Michael Jerger 355c457d57 add an test 11 months ago
Michael Jerger 9cb77f395f prepare minor 11 months ago
Michael Jerger 8ce3c74c34 add some todos 11 months ago
bom e927ec9fcf Merge release tasks into one 11 months ago
bom c93875f491 Pin devops-build Image version 11 months ago
bom 459a182388 Remove redundant pip install 11 months ago
bom 1d9674b097 Use correct CI variable for Image tag 11 months ago
bom 0219127416 Add docker service to CI
Enables CI to actually build docker images
11 months ago
bom 5c3d07e58c Update docker ci build 11 months ago
erik 8b11a4fa61 Add execute_handled 11 months ago
Michael Jerger 9c44bebd5d use the new image_naming 11 months ago
Michael Jerger 6b1ffb6e99 add some tests 11 months ago
bom b7acaab2a9 Merge release tasks into one 11 months ago
bom ac06c71a1a Pin devops-build Image version 11 months ago
bom 039b9fe2f0 Remove redundant pip install 11 months ago
bom 9016592832 Use correct CI variable for Image tag 11 months ago
erik 6332b738e3 bump version to: 4.0.17-dev 11 months ago
erik 55daeb69f1 release: 4.0.16 11 months ago
bom 153d4bbf9a Add docker service to CI
Enables CI to actually build docker images
11 months ago
erik 1ae9593537 Merge branch 'main' of gitlab.com:domaindrivenarchitecture/dda-devops-build 11 months ago
bom 0a44d6e363 bump version to: 4.0.16-dev 11 months ago
bom 9341c147ae release: 4.0.15 11 months ago
erik a6f6b9715e Add execute_handled 11 months ago
bom e9c6c95299 Update docker ci build 11 months ago
Michael Jerger eb5be21824 add more granulare build images 11 months ago
erik 036eb53972 bump version to: 4.0.15-dev 11 months ago
erik 7ff93b18d5 release: 4.0.14 11 months ago
erik 4bc4af52cd Remove unused import 11 months ago
erik d2bc3cd9f3 Use execute 11 months ago
erik 1cc7221c44 Add DOCKER_HOST 11 months ago
erik 15544e0ba0 Raise exception when encountered 11 months ago
erik cf091b64e3 bump version to: 4.0.14-dev 11 months ago
erik 54f0ad2297 release: 4.0.13 11 months ago
erik b8f55aa10d [Skip-CI] Install newest ddadevops in CI 11 months ago
erik 7a55d0f3a6 [Skip-CI] Install newest ddadevops in CI 11 months ago
erik 93c9a542d9 bump version to: 4.0.13-dev 11 months ago
erik 5755133d89 release: 4.0.12 11 months ago
erik 66c78da0c2 Install linting tools in linting stage 11 months ago
erik a4cdf051ff Linting 11 months ago
erik e1639974e3 Specify raised exceptions 11 months ago
erik 186f057b2b Use more verbose error output 11 months ago

@ -1,42 +0,0 @@
name: stable
on:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
jobs:
build:
name: stable build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use python 3.x
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: build stable release
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
run: |
pyb -P version=${{ github.ref }} publish upload
- name: Create GH Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ github.ref }}
draft: false
prerelease: false

@ -1,30 +0,0 @@
name: unstable
on:
push:
tags:
- '![0-9]+.[0-9]+.[0-9]+'
jobs:
build:
name: unstable
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use python 3.x
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: build unstable release
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
run: |
pyb publish upload

3
.gitignore vendored

@ -109,3 +109,6 @@ venv.bak/
.clj-kondo/
.lsp/
.calva/
.cpcache/
infrastructure/backup/image/resources/backup-repository-state.edn

@ -1,52 +1,96 @@
image: "domaindrivenarchitecture/devops-build:4.0.8"
before_script:
- python --version
- python -m pip install --upgrade pip
- pip install -r requirements.txt
- export IMAGE_TAG=$CI_IMAGE_TAG
- export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
- export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
stages:
- lint&test
- upload
- image
.py: &py
image: "domaindrivenarchitecture/ddadevops-python:4.10.7"
before_script:
- export RELEASE_ARTIFACT_TOKEN=$MEISSA_REPO_BUERO_RW
- python --version
- pip install -r requirements.txt
.img: &img
image: "domaindrivenarchitecture/ddadevops-dind:4.10.7"
services:
- docker:dind
before_script:
- export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
- export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
- export IMAGE_TAG=$CI_COMMIT_TAG
.tag_only: &tag_only
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: never
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
lint:
<<: *py
stage: lint&test
script:
- pip install -r dev_requirements.txt
- pyb lint
pytest:
<<: *py
stage: lint&test
script:
- pip install -r dev_requirements.txt
- pyb test
pypi-stable:
<<: *py
<<: *tag_only
stage: upload
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: never
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
script:
- pyb -P version=$CI_COMMIT_TAG publish upload
- pyb -P version=$CI_COMMIT_TAG publish upload publish_artifacts
clojure-image-test-publish:
clj-cljs-image-publish:
<<: *img
<<: *tag_only
stage: image
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: never
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
script:
- cd infrastructure/clojure && pyb image test publish
- cd infrastructure/clj-cljs && pyb image publish
devops-build-image-test-publish:
clj-image-publish:
<<: *img
<<: *tag_only
stage: image
script:
- cd infrastructure/clj && pyb image publish
python-image-publish:
<<: *img
<<: *tag_only
stage: image
script:
- cd infrastructure/python && pyb image publish
dind-image-publish:
<<: *img
<<: *tag_only
stage: image
script:
- cd infrastructure/dind && pyb image publish
ddadevops-image-publish:
<<: *img
<<: *tag_only
stage: image
script:
- cd infrastructure/ddadevops && pyb image publish
kotlin-image-publish:
<<: *img
<<: *tag_only
stage: image
script:
- cd infrastructure/kotlin && pyb image publish
backup-image-publish:
<<: *img
<<: *tag_only
stage: image
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: never
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
script:
- cd infrastructure/devops-build && pyb image test publish
- cd infrastructure/backup && pyb image publish

@ -2,7 +2,6 @@
[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://meissa-gmbh.de/img/community/Mastodon_Logotype.svg" width=20 alt="team@social.meissa-gmbh.de"> team@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@team) | [Website & Blog](https://domaindrivenarchitecture.org)
![release prod](https://github.com/DomainDrivenArchitecture/dda-devops-build/workflows/release%20prod/badge.svg)
dda-devops-build integrates all the tools we use to work with clouds & provide some nice functions around.
@ -70,12 +69,12 @@ classDiagram
DevopsBuild <|-- ProvsK3sBuild
DevopsBuild <|-- C4kBuild
link DevopsBuild "./doc/DevopsBuild.md"
link DevopsImageBuild "./doc/DevopsImageBuild.md"
link DevopsTerraformBuild "./doc/DevopsTerraformBuild.md"
link ReleaseMixin "./doc/ReleaseMixin.md"
link ProvsK3sBuild "doc/ProvsK3sBuild.md"
link C4kBuild "doc/C4kBuild.md"
link DevopsBuild "dda-devops-build/src/doc/DevopsBuild.md"
link DevopsImageBuild "dda-devops-build/src/doc/DevopsImageBuild.md"
link DevopsTerraformBuild "dda-devops-build/src/doc/DevopsTerraformBuild.md"
link ReleaseMixin "dda-devops-build/src/doc/ReleaseMixin.md"
link ProvsK3sBuild "dda-devops-build/src/doc/ProvsK3sBuild.md"
link C4kBuild "dda-devops-build/src/doc/C4kBuild.md"
```
@ -84,6 +83,10 @@ Principles we follow are:
* Separate build artefacts from version controlled code
* Domain Driven Design - in order to stay sustainable
## Example Project
An example project which is using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
## Installation
Ensure that your python3 version is at least Python 3.10
@ -94,17 +97,9 @@ pip3 install -r requirements.txt
export PATH=$PATH:~/.local/bin
```
## Reference
## Example Project
* [DevopsBuild](./doc/DevopsBuild.md)
* [DevopsImageBuild](./doc/DevopsImageBuild.md)
* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
* [ReleaseMixin](./doc/ReleaseMixin.md)
* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
* [C4kBuild](doc/C4kBuild.md)
An example project which is using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
## Example Build
@ -186,10 +181,23 @@ def destroy(project):
pyb dev publish upload
pip3 install --upgrade ddadevops --pre
pyb [patch|minor|major] prepare_release tag_bump_and_push_release
pyb [patch|minor|major]
pip3 install --upgrade ddadevops
```
## Reference
* [DevopsBuild](./doc/DevopsBuild.md)
* [DevopsImageBuild](./doc/DevopsImageBuild.md)
* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
* [ReleaseMixin](./doc/ReleaseMixin.md)
* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
* [C4kBuild](doc/C4kBuild.md)
## Development & mirrors
Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
@ -197,6 +205,7 @@ Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
Mirrors are:
* https://gitlab.com/domaindrivenarchitecture/dda-devops-build (issues and PR, CI)
* https://github.com/DomainDrivenArchitecture/dda-devops-build
For more details about our repository model see: https://repo.prod.meissa.de/meissa/federate-your-repos
@ -204,8 +213,3 @@ For more details about our repository model see: https://repo.prod.meissa.de/mei
Copyright © 2021 meissa GmbH
Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")
## License
Copyright © 2023 meissa GmbH
Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")

@ -22,63 +22,71 @@ from ddadevops import *
use_plugin("python.core")
use_plugin("copy_resources")
use_plugin("filter_resources")
#use_plugin("python.unittest")
#use_plugin("python.coverage")
# use_plugin("python.unittest")
# use_plugin("python.coverage")
use_plugin("python.distutils")
#use_plugin("python.install_dependencies")
# use_plugin("python.install_dependencies")
default_task = "dev"
name = "ddadevops"
MODULE = "not-used"
PROJECT_ROOT_PATH = "."
version = "4.0.12-dev"
version = "4.12.1-dev"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
description = __doc__
authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]
url = "https://repo.prod.meissa.de/meissa/dda-devops-build"
requires_python = ">=3.10" # CHECK IF NEW VERSION EXISTS
requires_python = ">=3.10" # CHECK IF NEW VERSION EXISTS
license = "Apache Software License"
@init
def initialize(project):
#project.build_depends_on('mockito')
#project.build_depends_on('unittest-xml-reporting')
project.build_depends_on("ddadevops>=4.0.0")
# project.build_depends_on('mockito')
# project.build_depends_on('unittest-xml-reporting')
project.build_depends_on("ddadevops>=4.7.0")
project.set_property("verbose", True)
project.get_property("filter_resources_glob").append("main/python/ddadevops/__init__.py")
project.get_property("filter_resources_glob").append(
"main/python/ddadevops/__init__.py"
)
project.set_property("dir_source_unittest_python", "src/test/python")
project.set_property("copy_resources_target", "$dir_dist/ddadevops")
project.get_property("copy_resources_glob").append("LICENSE")
project.get_property("copy_resources_glob").append("src/main/resources/terraform/*")
project.get_property("copy_resources_glob").append("src/main/resources/docker/image/resources/*")
project.get_property("copy_resources_glob").append(
"src/main/resources/docker/image/resources/*"
)
project.include_file("ddadevops", "LICENSE")
project.include_file("ddadevops", "src/main/resources/terraform/*")
project.include_file("ddadevops", "src/main/resources/docker/image/resources/*")
#project.set_property('distutils_upload_sign', True)
#project.set_property('distutils_upload_sign_identity', '')
# project.set_property('distutils_upload_sign', True)
# project.set_property('distutils_upload_sign_identity', '')
project.set_property("distutils_readme_description", True)
project.set_property("distutils_description_overwrite", True)
project.set_property("distutils_classifiers", [
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.10',
'Operating System :: POSIX :: Linux',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing'
])
project.set_property(
"distutils_classifiers",
[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.10",
"Operating System :: POSIX :: Linux",
"Operating System :: OS Independent",
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Quality Assurance",
"Topic :: Software Development :: Testing",
],
)
input = {
"name": name,
@ -88,59 +96,106 @@ def initialize(project):
"build_types": [],
"mixin_types": ["RELEASE"],
"release_primary_build_file": "build.py",
"release_secondary_build_files": [
"infrastructure/backup/build.py",
"infrastructure/python/build.py",
"infrastructure/dind/build.py",
"infrastructure/ddadevops/build.py",
"infrastructure/clj-cljs/build.py",
"infrastructure/clj/build.py",
"infrastructure/kotlin/build.py",
],
"release_artifacts": [],
"release_artifact_server_url": "https://repo.prod.meissa.de",
"release_organisation": "meissa",
"release_repository_name": "dda-devops-build",
}
build = ReleaseMixin(project, input)
build.initialize_build_dir()
@task
def test(project):
run("pytest", check=True)
@task
def lint(project):
run("flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 "+
"--show-source --statistics src/main/python/ddadevops/", shell=True, check=True)
run("flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 "+
"--per-file-ignores=\"__init__.py:F401\" "+
"--ignore=E722,W503 --statistics src/main/python/ddadevops/", shell=True, check=True)
run("python -m mypy src/main/python/ddadevops/ --ignore-missing-imports "+
"--disable-error-code=attr-defined --disable-error-code=union-attr", shell=True, check=True)
run("pylint -d W0511,R0903,C0301,W0614,C0114,C0115,C0116,similarities,W1203,W0702,W0702,"+
"R0913,R0902,R0914,R1732,R1705,W0707,C0123,W0703,C0103 src/main/python/ddadevops/", shell=True, check=True)
run(
"flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 "
+ "--show-source --statistics src/main/python/ddadevops/",
shell=True,
check=True,
)
run(
"flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 "
+ '--per-file-ignores="__init__.py:F401" '
+ "--ignore=E722,W503 --statistics src/main/python/ddadevops/",
shell=True,
check=True,
)
run(
"python -m mypy src/main/python/ddadevops/ --ignore-missing-imports "
+ "--disable-error-code=attr-defined --disable-error-code=union-attr",
shell=True,
check=True,
)
run(
"pylint -d W0511,R0903,C0301,W0614,C0114,C0115,C0116,similarities,W1203,W0702,W0702,"
+ "R0913,R0902,R0914,R1732,R1705,W0707,C0123,W0703,C0103 src/main/python/ddadevops/",
shell=True,
check=True,
)
@task
def patch(project):
    """Create a PATCH release: build, run lint/test, then tag and push."""
    release_type = "PATCH"
    build(project, release_type)
    linttest(project, release_type)
    release(project)
@task
def minor(project):
    """Create a MINOR release: build, run lint/test, then tag and push."""
    release_type = "MINOR"
    build(project, release_type)
    linttest(project, release_type)
    release(project)
@task
def major(project):
    """Create a MAJOR release: build, run lint/test, then tag and push."""
    release_type = "MAJOR"
    build(project, release_type)
    linttest(project, release_type)
    release(project)
@task
def dev(project):
    """Run a development build (release type NONE): build and lint/test only."""
    release_type = "NONE"
    build(project, release_type)
    linttest(project, release_type)
@task
def nothing(project):
    """Intentional no-op task; serves as a placeholder build target."""
    return None
# NOTE(review): this region interleaves the old and new side of a diff — the
# bare `def prepare_release` / `def tag_bump_and_push_release` /
# `def build(project, release_type):` headers have no bodies here and the
# nested-looking defs are the replacement helpers. Reconstruct against the
# actual repository before treating this as runnable code.
@task
def prepare_release(project):
# Adjust all build files to the release version and commit locally.
def prepare(project):
build = get_devops_build(project)
build.prepare_release()
@task
def tag_bump_and_push_release(project):
# Tag the repo, bump to next snapshot version, and push with tags.
def tag(project):
build = get_devops_build(project)
build.tag_bump_and_push_release()
def build(project, release_type):
@task
# Publish the release and its artifacts to forgejo/gitea.
def publish_artifacts(project):
build = get_devops_build(project)
build.publish_artifacts()
# Full release = prepare (version/commit) followed by tag/bump/push.
def release(project):
prepare(project)
tag(project)
def linttest(project, release_type):
    """Propagate *release_type* to the devops build, then run the test task."""
    devops_build = get_devops_build(project)
    devops_build.update_release_type(release_type)
    test(project)

@ -35,7 +35,12 @@ classDiagram
| name | name in context of build & ENV | - | |
## Example Usage
### build.py
### Example project
A complete example project you can find on: https://repo.prod.meissa.de/meissa/buildtest
### Example of a build.py
```python
from os import environ

@ -23,12 +23,9 @@ classDiagram
| build_dir_name | name of dir, build is executed in | target |
| build_types | list of special builds used. Valid values are ["IMAGE", "C4K", "K3S", "TERRAFORM"] | [] |
| mixin_types | mixins are orthogonal to builds and represent additional capabilities. Valid Values are ["RELEASE"] | [] |
| module | module name - may result in a hierarchy like name/module | |
| name | dedicated name of the build | module |
| project_root_path | relative path to projects root. Is used to locate the target dir | |
| stage | sth. like test, int, acc or prod | |
## Example Usage
### build.py
```python

@ -30,7 +30,7 @@ classDiagram
| image_dockerhub_user | user to access docker-hub | IMAGE_DOCKERHUB_USER from env or credentials from gopass |
| image_dockerhub_password | password to access docker-hub | IMAGE_DOCKERHUB_PASSWORD from env or credentials from gopass |
| image_tag | tag for publishing the image | IMAGE_TAG from env |
| image_naming | Strategy for calculating the image name. Possible values are [NAME_ONLY,NAME_AND_MODULE] |NAME_ONLY |
### Credentials Mapping defaults

@ -0,0 +1,33 @@
# ddadevops Images
## ddadevops-clojure
Contains
* clojure
* shadowcljs
* lein
* java
* graalvm
* pybuilder, ddadevops
## ddadevops
Contains:
* pybuilder, ddadevops
## devops-build
Image is deprecated.
## ddadevops-dind
Contains:
* docker in docker
* pybuilder, ddadevops
## ddadevops-python
Contains:
* python 3.10
* python linting
* python setup-tools
* pybuilder, ddadevops

@ -13,14 +13,17 @@ classDiagram
## Input
| name | description | default |
| ----------------------------- | ----------------------------------------------------------------- | --------- |
| k3s_provision_user | the user used to provision k3s | "root" |
| k3s_letsencrypt_email | email address used for letsencrypt | |
| k3s_letsencrypt_endpoint | letsencrypt endpoint. Valid values are staging, prod | "staging" |
| k3s_app_filename_to_provision | a k8s manifest to apply immediately after k3s setup was successful | |
| k3s_enable_echo | provision the echo app on k3s. Valid values are true, false | "false" |
| k3s_provs_template | use an individual template for provs config | None |
| name | description | default |
| --------------------------------- | ----------------------------------------------------------------- | --------- |
| k3s_provision_user | the user used to provision k3s | "root" |
| k3s_letsencrypt_email | email address used for letsencrypt | |
| k3s_letsencrypt_endpoint | letsencrypt endpoint. Valid values are staging, prod | "staging" |
| k3s_app_filename_to_provision | a k8s manifest to apply immediately after k3s setup was successful | |
| k3s_enable_echo | provision the echo app on k3s. Valid values are true, false | "false" |
| k3s_provs_template | use an individual template for provs config | None |
| k3s_enable_hetzner_csi | enable hetzner csi | False |
| k3s_hetzner_api_token | hetzner_api_token | None |
| k3s_hetzner_encryption_passphrase | encryption passphrase for volumes | None |
### Credentials Mapping defaults

@ -1,5 +1,15 @@
# ReleaseMixin
- [ReleaseMixin](#releasemixin)
- [Input](#input)
- [Example Usage just for creating releases](#example-usage-just-for-creating-releases)
- [build.py](#buildpy)
- [call the build for creating a major release](#call-the-build-for-creating-a-major-release)
- [Example Usage for creating a release on forgejo / gitea \& upload the generated artifacts](#example-usage-for-creating-a-release-on-forgejo--gitea--upload-the-generated-artifacts)
- [build.py](#buildpy-1)
- [call the build](#call-the-build)
Support for releases following the trunk-based-release flow (see https://trunkbaseddevelopment.com/)
```mermaid
@ -8,6 +18,7 @@ classDiagram
prepare_release() - adjust all build files to carry the correct version & commit locally
tag_and_push_release() - tag the git repo and push changes to origin
update_release_type (release_type) - change the release type during run time
publish_artifacts() - publish release & artifacts to forgejo/gitea
}
```
@ -15,18 +26,22 @@ classDiagram
## Input
| name | description | default |
| ----------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------- |
| ----------------------------- |-----------------------------------------------------------------------------------------------------------------------| --------------- |
| release_type | one of MAJOR, MINOR, PATCH, NONE | "NONE" |
| release_main_branch | the name of your trunk | "main" |
| release_main_branch | the name of your trunk | "main" |
| release_primary_build_file | path to the build file having the leading version info (read & write). Valid extensions are .clj, .json, .gradle, .py | "./project.clj" |
| release_secondary_build_files | list of secondary build files, version is written in. | [] |
| release_artifact_server_url | Optional: The base url of your forgejo/gitea instance to publish a release to | |
| release_organisation | Optional: The repository organisation name | |
| release_repository_name | Optional: The repository name | |
| release_artifacts | Optional: The list of artifacts to publish to the release generated name | [] |
## Example Usage
## Example Usage just for creating releases
### build.py
```python
from os import environ
from os import environ
from pybuilder.core import task, init
from ddadevops import *
@ -36,7 +51,7 @@ PROJECT_ROOT_PATH = '..'
@init
def initialize(project):
project.build_depends_on("ddadevops>=4.0.0")
project.build_depends_on("ddadevops>=4.7.0")
input = {
"name": name,
@ -48,35 +63,108 @@ def initialize(project):
"release_type": "MINOR",
"release_primary_build_file": "project.clj",
"release_secondary_build_files": ["package.json"],
}
project.build_depends_on("ddadevops>=4.0.0-dev")
}
build = ReleaseMixin(project, input)
build.initialize_build_dir()
@task
def prepare_release(project):
build = get_devops_build(project)
build.prepare_release()
def patch(project):
linttest(project, "PATCH")
release(project)
@task
def build(project):
print("do the build")
def minor(project):
linttest(project, "MINOR")
release(project)
@task
def major(project):
linttest(project, "MAJOR")
release(project)
@task
def dev(project):
linttest(project, "NONE")
@task
def publish(project):
print("publish your artefacts")
def prepare(project):
build = get_devops_build(project)
build.prepare_release()
@task
def after_publish(project):
def tag(project):
build = get_devops_build(project)
build.tag_bump_and_push_release()
def release(project):
prepare(project)
tag(project)
def linttest(project, release_type):
build = get_devops_build(project)
build.update_release_type(release_type)
#test(project)
#lint(project)
```
### call the build for creating a major release
```bash
pyb major
```
## Example Usage for creating a release on forgejo / gitea & upload the generated artifacts
### build.py
```python
from os import environ
from pybuilder.core import task, init
from ddadevops import *
name = 'my-project'
MODULE = 'my-module'
PROJECT_ROOT_PATH = '..'
@init
def initialize(project):
project.build_depends_on("ddadevops>=4.7.0")
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": [],
"mixin_types": ["RELEASE"],
"release_type": "MINOR",
"release_primary_build_file": "project.clj",
"release_secondary_build_files": ["package.json"],
"release_artifact_server_url": "https://repo.prod.meissa.de",
"release_organisation": "meissa",
"release_repository_name": "dda-devops-build",
"release_artifacts": ["target/doc.zip"],
}
build = ReleaseMixin(project, input)
build.initialize_build_dir()
@task
def publish_artifacts(project):
build = get_devops_build(project)
build.publish_artifacts()
```
### call the build
```bash
pyb prepare_release build publish after_publish
git checkout "4.7.0"
pyb publish_artifacts
```

@ -12,6 +12,7 @@ classDiagram
}
class Image {
image_naming
image_dockerhub_user
image_dockerhub_password
image_publish_tag
@ -87,6 +88,15 @@ classDiagram
release_type
release_main_branch
release_current_branch
release_artifact_server_url
release_organisation
release_repository_name
release_artifact_token
}
class Artifact {
path_str
path()
type()
}
class Credentials {
<<AggregateRoot>>
@ -129,6 +139,7 @@ classDiagram
TerraformDomain *-- "0..1" ProviderAws: providers
Release o-- "0..1" BuildFile: primary_build_file
Release o-- "0..n" BuildFile: secondary_build_files
Release "1" *-- "0..n" Artifact: release_artifacts
Release "1" *-- "1" Version: version
BuildFile *-- "1" Version: version
C4k *-- DnsRecord: dns_record

@ -16,35 +16,85 @@ We discussed how we will handle releases in cooperation with gitlab-ci.
### Outcome of Eventstroming: Events ordered by time
1. B: Pulls the latest changes
1. B: Possibly merge/rebase with main
1.
1. B: starts "create release-notes"
1. B: commits his changes with [skip-ci].
1.
1. B: starts the release build and specifies major, minor, patch
1.
1. S: does a git fetch & status and checks if there are no changes at origin
1. S: starts tests
1. S: runs the linting
1. S: possibly does image building and image testing
1.
1. S: version numbers are adjusted in project.clj/package.json to full version
1. S: change commit is tagged with git tag
1. S: version numbers are adjusted in project.clj/package.json to next snapshot version
1. S: makes a bump commit with [skip-ci].
1. S: push to gitlab/gitea along with git tags
1.
1. S: CI starts - for a new tag
1. S: CI runs tests
1. S: runs the linting
1. S: makes artifacts
1. S: possibly performs image building and image testing
1. S: publishes images and artifacts
1.
1. S: CI starts - for push with the last commit
1. S: CI runs tests
1. S: performs the linting
* B: is the human devops
* S: is the build / ci system
```mermaid
stateDiagram-v2
state prepare_release {
state "B: Pulls the latest changes" as pull
state "B: Possibly merge/rebase with main" as merge
state "B: starts 'create release-notes'" as rn
state "B: commits his changes with [skip-ci]." as c1
[*] --> pull
pull --> merge
merge --> rn
rn --> c1
c1 --> [*]
}
state release {
state "B: starts the release build and specifies major, minor, patch" as trigger
state "S: does a git fetch & status and checks if there are no changes at origin" as fetch
state "S: starts tests" as test
state "S: runs the linting" as lint
state "S: possibly does image building and image testing" as image
state "S: version numbers are adjusted in project.clj/package.json to full version" as vno
state "S: change commit is tagged with git tag" as c2
state "S: version numbers are adjusted in project.clj/package.json to next snapshot " as snap
state "S: makes a bump commit with [skip-ci]." as c3
state "S: push to gitlab/gitea along with git tags" as push
[*] --> trigger
trigger --> fetch
fetch --> lint
fetch --> test
fetch --> image
test --> vno
lint --> vno
image --> vno
vno --> c2
c2 --> snap
snap --> c3
c3 --> push
push --> [*]
}
state ci_tag {
state "S: CI starts - for a new tag" as ct1
state "S: runs the linting" as ct2
state "S: CI runs tests" as ct3
state "S: makes artifacts" as ct4
state "S: possibly performs image building and image testing" as ct5
state "S: publishes images and artifacts" as ct6
[*] --> ct1
ct1 --> ct2
ct2 --> ct3
ct3 --> ct4
ct4 --> ct5
ct5 --> ct6
ct6 --> [*]
}
state ci_version_bump {
state "S: CI starts - for push with the last commit" as cvb1
state "S: CI runs tests" as cvb2
state "S: performs the linting" as cvb3
[*] --> cvb1
cvb1 --> cvb2
cvb2 --> cvb3
cvb3 --> [*]
}
[*] --> prepare_release
prepare_release --> release
release --> ci_tag
release --> ci_version_bump
ci_tag --> [*]
ci_version_bump --> [*]
```
## Consequences

@ -1,8 +0,0 @@
adjust version no in build.py to release version no.
git commit -am "release"
git tag -am "release" [release version no]
git push --follow-tags
increase version no in build.py
git commit -am "version bump"
git push
pip3 install --upgrade ddadevops

@ -0,0 +1,51 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
import logging
name = 'dda-backup'
MODULE = 'NOT_SET'
PROJECT_ROOT_PATH = '../..'
version = "4.12.1-dev"
@init
def initialize(project):
# Dev builds get a timestamp suffix so every CI run publishes a unique tag.
image_tag = version
if "dev" in image_tag:
image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
# Build configuration for the dda-backup image; NAME_ONLY means the image
# name is derived from `name` without the module suffix.
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
"image_naming": "NAME_ONLY",
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.7.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()
# Build the container image.
@task
def image(project):
build = get_devops_build(project)
build.image()
# Run the built image (smoke run via docker run).
@task
def drun(project):
build = get_devops_build(project)
build.drun()
# Log in to Docker Hub and push the image.
@task
def publish(project):
build = get_devops_build(project)
build.dockerhub_login()
build.dockerhub_publish()

@ -0,0 +1,79 @@
## Init Statemachine
### Inputs
1. `restic-password: ""`
2. `restic-password-to-rotate: ""`
### Manual init the restic repository for the first time
1. apply backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=1`
2. exec into pod and execute restore pod (press tab to get your exact pod name)
`kubectl exec -it backup-restore-... -- /usr/local/bin/init.sh`
3. remove backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=0`
### Password Rotation
1. apply backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=1`
2. add new password to restic repository
`restic key add ....`
=> Trigger ::
field (1) credential current
field (2) credential new
3. replace field (1) with (2) & clear (2)
4. remove old key - ???
`restic remove ....`
```mermaid
stateDiagram-v2
[*] --> init
init --> backup_ready: trigger, restic-password !empty
backup_ready --> new_password_added: restic-password !empty && restic-password-to-rotate !empty
new_password_added --> backup_ready: restic-password !empty && restic-password-to-rotate empty
```
### First Steps
1. Cloud Testserver hochfahren
2. Dort backup-restore deployment (leeres Secret mgl.?), neues Secret "rotation-credential-secret" als Daten
3. mounten von angelegtem Secret in Pod backup-restore
4. ba*bash*ka Skript in pod starten -> liest Secret ?leer
5. Micha cons.
```mermaid
sequenceDiagram
participant k8s
participant e as entrypoint.sh
participant rm as restic-management.clj
k8s ->> e: cronjob calls
e ->> rm: start-file
rm ->> rm: rotate
activate rm
rm ->> rm: read-backup-repository-state (state)
rm ->> rm: read-secret (backup-secret/restic-password, rotation-credential-secret/rotation-credential)
rm ->> rm: switch
activate rm
rm ->> rm: if init && restic-password != null
activate rm
rm ->> rm: init.sh
rm ->> rm: state init -> backup-ready
deactivate rm
rm ->> rm: if backup-ready && rotation-credential != null
activate rm
rm ->> rm: add-new-password-to-restic-repository.sh
rm ->> rm: state backup-ready -> new-password-added
deactivate rm
rm ->> rm: if new-password-added && rotation-credential == null
activate rm
rm ->> rm: remove-old-password-from-restic-repository.sh
rm ->> rm: state new-password-added -> backup-ready
deactivate rm
deactivate rm
rm ->> rm: store-repository-state (state)
deactivate rm
```

@ -0,0 +1,5 @@
FROM ubuntu:jammy
# Copy install scripts/resources into the image and run the installer,
# which provisions restic, postgresql-client and babashka (see install.sh).
ADD resources /tmp/
RUN /tmp/install.sh

@ -0,0 +1,69 @@
# Sub-path inside ${RESTIC_REPOSITORY} used for plain file backups.
backup_file_path='files'
# Initialize the restic repository for file backups.
# Passes --cacert only when CERTIFICATE_FILE is set (private/self-signed CA).
function init-file-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init
else
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init --cacert ${CERTIFICATE_FILE}
fi
}
# First arg is the directory, second is optional for the path to a certificate file
function backup-directory() {
local directory="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup .
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup . --cacert ${CERTIFICATE_FILE}
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
fi
}
# First arg is the directory, the remaining args are the sub-directories (relative to the first directory) to backup.
function backup-fs-from-directory() {
local directory="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@ --cacert ${CERTIFICATE_FILE}
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
fi
}
# Restore a snapshot (default: latest) into the given directory.
# WARNING: wipes the target first via `rm -rf ${directory}*`.
function restore-directory() {
local directory="$1"; shift
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
rm -rf ${directory}*
restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory}
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
rm -rf ${directory}*
restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory} --cacert ${CERTIFICATE_FILE}
fi
}
# List all snapshots of the file backup repository.
function list-snapshot-files() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}

@ -0,0 +1,21 @@
# usage: file_env VAR [DEFAULT]
# ie: file_env 'XYZ_DB_PASSWORD' 'example'
# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
# "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
# Resolve a config value from either $VAR or a file named by $VAR_FILE
# (Docker secrets pattern). Exactly one of the two may be set; the resolved
# value is exported as $VAR and $VAR_FILE is unset afterwards.
function file_env() {
local var="$1"
local fileVar="${var}_FILE"
# Optional second argument is the default when neither source is set.
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
# ${!fileVar} is indirect expansion: the file path stored in $VAR_FILE.
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}

@ -0,0 +1,36 @@
#!/bin/bash
set -exo pipefail
# Download a pinned babashka release, verify its sha256 checksum, and install
# the `bb` binary to /usr/local/bin.
function babashka_install() {
babashka_version="1.3.189"
curl -SsLo /tmp/babashka-${babashka_version}-linux-amd64.tar.gz https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz
curl -SsLo /tmp/checksum https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz.sha256
# The upstream .sha256 file contains only the hash; append "  <path>" so it
# matches the `sha256sum -c` expected "HASH  FILE" format.
echo " /tmp/babashka-$babashka_version-linux-amd64.tar.gz"|tee -a /tmp/checksum
sha256sum -c --status /tmp/checksum
tar -C /tmp -xzf /tmp/babashka-${babashka_version}-linux-amd64.tar.gz
install -m 0700 -o root -g root /tmp/bb /usr/local/bin/
}
# Provision the backup image: base packages, postgres apt repo, babashka,
# then install the backup helper scripts. Package noise goes to /dev/null.
function main() {
{
upgradeSystem
apt-get install -qqy ca-certificates curl gnupg postgresql-client-14 restic
curl -Ss --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg
sh -c 'echo "deb [signed-by=/etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg] https://apt.postgresql.org/pub/repos/apt jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
upgradeSystem
babashka_install
} > /dev/null
update-ca-certificates
# Shared shell libraries are read-only (0400); the clj entrypoint is 0740.
install -m 0400 /tmp/functions.sh /usr/local/lib/
install -m 0400 /tmp/pg-functions.sh /usr/local/lib/
install -m 0400 /tmp/file-functions.sh /usr/local/lib/
install -m 0740 /tmp/restic_management.clj /usr/local/bin/
cleanupDocker
}
# upgradeSystem / cleanupDocker are provided by install_functions_debian.sh.
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

@ -0,0 +1,149 @@
# Repository sub-paths for postgres role dumps and database dumps.
backup_pg_role_path='pg-role'
backup_pg_database_path='pg-database'
# Initialize the role backup repository; extra args (e.g. --cacert) pass through.
function init-command() {
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} -v init $@
}
function init-role-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
init-command
else
init-command --cacert ${CERTIFICATE_FILE}
fi
}
# Initialize the database backup repository; extra args pass through.
function init-database-command() {
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} -v init $@
}
function init-database-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
init-database-command
else
init-database-command --cacert ${CERTIFICATE_FILE}
fi
}
# Drop and recreate ${POSTGRES_DB} (connecting via template1) so a restore
# starts from an empty database. Relies on ~/.pgpass for authentication.
function drop-create-db() {
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password -c "DROP DATABASE \"${POSTGRES_DB}\";"
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password -c "CREATE DATABASE \"${POSTGRES_DB}\";"
}
# Write /root/.pgpass so psql/pg_dump/pg_dumpall can authenticate without a
# prompt (--no-password). One entry for ${POSTGRES_DB}, one for template1.
function create-pg-pass() {
    local pg_host=${POSTGRES_HOST:-localhost}
    echo "${pg_host}:${POSTGRES_DB}:${POSTGRES_USER}:${POSTGRES_PASSWORD}" > /root/.pgpass
    # Fix: the template1 entry previously used ${POSTGRES_HOST} directly and
    # produced a malformed line when POSTGRES_HOST was unset; both entries now
    # share the same localhost fallback.
    echo "${pg_host}:template1:${POSTGRES_USER}:${POSTGRES_PASSWORD}" >> /root/.pgpass
    # .pgpass must not be world/group readable or libpq ignores it.
    chmod 0600 /root/.pgpass
}
# Unlock stale locks on the role repository; extra args (e.g. --cacert) pass through.
function roles-unlock-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} unlock --cleanup-cache $@
}
# Prune role snapshots according to the retention env vars.
function roles-forget-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}
# Dump cluster roles matching $1 (grep prefix) and stream them into restic.
function backup-roles() {
local role_prefix="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
roles-unlock-command
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin
roles-forget-command
else
roles-unlock-command --cacert ${CERTIFICATE_FILE}
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin --cacert ${CERTIFICATE_FILE}
roles-forget-command --cacert ${CERTIFICATE_FILE}
fi
}
# Unlock stale locks on the database repository; extra args pass through.
function db-unlock-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} unlock --cleanup-cache $@
}
# Prune database snapshots according to the retention env vars.
function db-forget-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}
# Stream a pg_dump of ${POSTGRES_DB} into restic (no temp file on disk).
function backup-db-dump() {
if [ -z ${CERTIFICATE_FILE} ];
then
db-unlock-command
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin
db-forget-command
else
db-unlock-command --cacert ${CERTIFICATE_FILE}
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin --cacert ${CERTIFICATE_FILE}
db-forget-command --cacert ${CERTIFICATE_FILE}
fi
}
# Replay a role dump snapshot (default: latest) into the cluster via psql.
function restore-roles() {
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
roles-unlock-command
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin | \
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
else
roles-unlock-command --cacert ${CERTIFICATE_FILE}
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
fi
}
# Replay a database dump snapshot (default: latest) into ${POSTGRES_DB}.
# Expects the target database to exist (see drop-create-db).
function restore-db() {
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
db-unlock-command
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin | \
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
else
db-unlock-command --cacert ${CERTIFICATE_FILE}
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
fi
}
# List snapshots in the ROLE backup repository.
function list-snapshot-roles() {
    if [ -z ${CERTIFICATE_FILE} ];
    then
        restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots
    else
        # Fix: the certificate branch previously queried
        # ${backup_pg_database_path}, listing database snapshots instead of
        # role snapshots (copy-paste from list-snapshot-db).
        restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots --cacert ${CERTIFICATE_FILE}
    fi
}
# List snapshots in the database backup repository.
function list-snapshot-db() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}

@ -0,0 +1,51 @@
#! /usr/bin/env bb
;; Babashka script driving the restic password-rotation state machine
;; (see the sequence diagram in the dev notes). Currently reads the k8s
;; secrets and persists a state map to an edn file.
(ns restic-management
(:require
[clojure.spec.alpha :as s]
[clojure.java.io :as io]
[clojure.edn :as edn]))
;; Spec for the persisted repository state: a map with a :state string.
(s/def ::state string?)
(s/def ::backup-repository-state
(s/keys :req-un [::state]))
;; NOTE(review): this def is currently unused by the code below.
(def state {:state ""})
;; Persist the given state (already a string, see prn-str below) to disk.
(defn store-backup-repository-state [s]
(spit "backup-repository-state.edn" s))
;; Read the persisted state; returns nil (after printing) on IO/parse errors.
(defn read-backup-repository-state []
(try
(with-open [r (io/reader "backup-repository-state.edn")]
(edn/read (java.io.PushbackReader. r)))
(catch java.io.IOException e
(printf "Couldn't open '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))
(catch RuntimeException e
(printf "Error parsing edn file '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))))
;; Read a mounted k8s secret relative to /var/run/secrets/.
(defn read-secret [s]
(slurp (str "/var/run/secrets/" s)))
;"/var/run/secrets/rotation-credential-secret/rotation-credential"))
;(println (read-backup-repository-state))
;(println (:state (read-backup-repository-state)))
;(println (s/valid? ::backup-repository-state (read-backup-repository-state)))
;; NOTE(review): printing secrets to stdout leaks them into pod logs —
;; presumably debug output; should be removed before production use.
(println (read-secret "rotation-credential-secret/rotation-credential"))
(println (read-secret "backup-secrets/restic-password"))
(s/def ::new-password string?)
(s/def ::old-password string?)
(s/def ::password-state
(s/keys :req-un [::new-password ::old-password]))
;; Snapshot the current and rotation passwords into the state file.
(defn rotate []
(let [state {:new-password (read-secret "rotation-credential-secret/rotation-credential")
:old-password (read-secret "backup-secrets/restic-password")}]
(store-backup-repository-state (prn-str state))))
(rotate)

@ -0,0 +1,56 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
name = "ddadevops"
MODULE = "clj-cljs"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"
@init
def initialize(project):
# Dev builds get a timestamp suffix so each CI run pushes a unique tag.
image_tag = version
if "dev" in image_tag:
image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
# NAME_AND_MODULE: the published image name combines `name` and `MODULE`
# (ddadevops-clj-cljs).
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
"image_naming": "NAME_AND_MODULE",
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.0.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()
# Build the container image.
@task
def image(project):
build = get_devops_build(project)
build.image()
# Run the built image (smoke run via docker run).
@task
def drun(project):
build = get_devops_build(project)
build.drun()
# Run the image test suite.
@task
def test(project):
build = get_devops_build(project)
build.test()
# Log in to Docker Hub and push the image.
@task
def publish(project):
build = get_devops_build(project)
build.dockerhub_login()
build.dockerhub_publish()

@ -0,0 +1,4 @@
# Node base image provides npm for the shadow-cljs install (see install.sh).
FROM node:lts-bookworm-slim
ADD resources /tmp
RUN /tmp/install.sh

@ -0,0 +1,45 @@
#!/bin/bash
set -exo pipefail
# Provision the clj-cljs image: JDK + leiningen, shadow-cljs via npm,
# kubeconform (checksum-verified), and pybuilder/ddadevops.
function main() {
{
upgradeSystem
mkdir -p /usr/share/man/man1
apt-get -qqy install curl openjdk-17-jre-headless leiningen
# shadow-cljs
npm install -g npm
npm install -g --save-dev shadow-cljs
# download kubeconform & graalvm
kubeconform_version="0.6.4"
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
# checksum kubeconform
checksum
# install kubeconform
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
#install pyb
apt-get -qqy install python3 python3-pip git
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
#check
lein --help
cleanupDocker
} > /dev/null
}
# Verify the kubeconform tarball against the upstream CHECKSUMS file.
# The awk/sed picks the linux-amd64 line (2nd row) and rewrites the path
# to /tmp so `sha256sum -c` finds the downloaded file.
function checksum() {
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
cat /tmp/kubeconform-checksum
sha256sum -c --status /tmp/kubeconform-checksum
}
# upgradeSystem / cleanupDocker are provided by install_functions_debian.sh.
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

@ -1,14 +1,18 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
name = "clojure"
MODULE = "image"
name = "ddadevops"
MODULE = "clj"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"
@init
def initialize(project):
image_tag = version
if "dev" in image_tag:
image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
input = {
"name": name,
@ -17,6 +21,8 @@ def initialize(project):
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
"image_naming": "NAME_AND_MODULE",
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.0.0")

@ -0,0 +1,6 @@
FROM debian:stable-slim
ADD resources /tmp
RUN /tmp/install.sh
# GraalVM is symlinked to /usr/lib/jvm/graalvm by install.sh.
ENV LANG=en_US.UTF-8 \
JAVA_HOME=/usr/lib/jvm/graalvm

@ -0,0 +1,57 @@
#!/bin/bash
set -exo pipefail

# Provision the clojure build image: JDK 17 + leiningen, GraalVM (native-image)
# and kubeconform, each download verified against its published checksum.
# Output is silenced; `set -e` aborts the build on any failing command.
function main() {
  {
    upgradeSystem
    apt-get -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev
    # download kubeconform & graalvm plus their published checksums
    kubeconform_version="0.6.4"
    graalvm_jdk_version="21.0.2"
    curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
    curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
    curl -SsLo /tmp/graalvm-community-jdk.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz
    curl -SsLo /tmp/graalvm-checksum https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz.sha256
    # checksum kubeconform & graalvm-jdk (aborts on mismatch)
    checksum
    # install kubeconform
    tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
    # install graalvm: unpack, then make it the default java and expose
    # gu / native-image on PATH; the unpacked directory name is version
    # dependent, hence the grep.
    tar -C /usr/lib/jvm/ -xf /tmp/graalvm-community-jdk.tar.gz
    dirname_graalvm=$(ls /usr/lib/jvm/|grep -e graa)
    ln -s /usr/lib/jvm/$dirname_graalvm /usr/lib/jvm/graalvm
    ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
    update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
    ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
    # install pybuilder + ddadevops python tooling
    apt-get -qqy install python3 python3-pip
    pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
    # smoke checks: both toolchains runnable
    native-image --version
    lein -v
    cleanupDocker
  } > /dev/null
}
# Verify both downloads before unpacking.
function checksum() {
  # kubeconform: select the published hash by filename (not a hard-coded
  # `sed -n '2p'` line number) so upstream reordering cannot break the check.
  awk '/kubeconform-linux-amd64.tar.gz$/ {print $1 " /tmp/" $2}' /tmp/CHECKSUMS > /tmp/kubeconform-checksum
  sha256sum -c --status /tmp/kubeconform-checksum
  # graalvm: the upstream .sha256 file contains only the bare hash. Build a
  # properly formatted "HASH  FILE" line explicitly; stripping whitespace
  # makes this robust whether or not the downloaded file ends with a newline
  # (the previous `echo | tee -a` produced a second, malformed line in that case).
  printf '%s  /tmp/graalvm-community-jdk.tar.gz\n' "$(tr -d '[:space:]' < /tmp/graalvm-checksum)" > /tmp/graalvm-checksum
  sha256sum -c --status /tmp/graalvm-checksum
}
# Shared helpers (upgradeSystem, cleanupDocker, ...) are staged into /tmp by the image build.
source /tmp/install_functions_debian.sh
# Run apt non-interactively so no prompt can stall the docker build.
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

@ -1,4 +0,0 @@
FROM node:lts-buster-slim
ADD resources /tmp
RUN /tmp/install.sh

@ -1,2 +0,0 @@
d7a5cb848b783c15119316d716d8a74bf11c9e3ab050f3adf28e0678a6018467 kubeconform-v0.4.7.tar.gz
bbd3e03025168172a76c2a29e6a14c1c37e3476b30774259c3ef5952fb86f470 graalvm-ce-java11-linux-amd64-21.2.0.tar.gz

@ -1,43 +0,0 @@
#!/bin/bash
set -eux
function main() {
upgradeSystem
mkdir -p /usr/share/man/man1
apt -qqy install openjdk-11-jre-headless leiningen curl build-essential libz-dev zlib1g-dev
# shadow-cljs
npm install -g --save-dev shadow-cljs
# download kubeconform & graalvm
curl -Lo /tmp/kubeconform-v0.4.7.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.4.7/kubeconform-linux-amd64.tar.gz
curl -Lo /tmp/graalvm-ce-java11-linux-amd64-21.2.0.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-21.2.0/graalvm-ce-java11-linux-amd64-21.2.0.tar.gz
# checksum
cd /tmp
sha256sum --check CHECKSUMS
# install kubeconform
tar -xf /tmp/kubeconform-v0.4.7.tar.gz
cp kubeconform /usr/local/bin
# install graalvm
tar -xzf graalvm-ce-java11-linux-amd64-21.2.0.tar.gz
mv graalvm-ce-java11-21.2.0 /usr/lib/jvm/
ln -s /usr/lib/jvm/graalvm-ce-java11-21.2.0 /usr/lib/jvm/graalvm
ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
gu install native-image
ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
#install lein
/tmp/lein.sh
cleanupDocker
}
source /tmp/install_functions.sh
main

@ -1,423 +0,0 @@
#!/usr/bin/env bash
# Ensure this file is executable via `chmod a+x lein`, then place it
# somewhere on your $PATH, like ~/bin. The rest of Leiningen will be
# installed upon first run into the ~/.lein/self-installs directory.
function msg {
echo "$@" 1>&2
}
export LEIN_VERSION="2.9.6"
# Must be sha256sum, will be replaced by bin/release
export LEIN_CHECKSUM='41c543f73eec4327dc20e60d5d820fc2a9dc772bc671610b9c385d9c4f5970b8'
case $LEIN_VERSION in
*SNAPSHOT) SNAPSHOT="YES" ;;
*) SNAPSHOT="NO" ;;
esac
if [[ "$CLASSPATH" != "" ]]; then
cat <<-'EOS' 1>&2
WARNING: You have $CLASSPATH set, probably by accident.
It is strongly recommended to unset this before proceeding.
EOS
fi
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]]; then
delimiter=";"
else
delimiter=":"
fi
if [[ "$OSTYPE" == "cygwin" ]]; then
cygwin=true
else
cygwin=false
fi
function command_not_found {
msg "Leiningen couldn't find $1 in your \$PATH ($PATH), which is required."
exit 1
}
function make_native_path {
# ensure we have native paths
if $cygwin && [[ "$1" == /* ]]; then
echo -n "$(cygpath -wp "$1")"
elif [[ "$OSTYPE" == "msys" && "$1" == /?/* ]]; then
echo -n "$(sh -c "(cd $1 2</dev/null && pwd -W) || echo $1 | sed 's/^\\/\([a-z]\)/\\1:/g'")"
else
echo -n "$1"
fi
}
# usage : add_path PATH_VAR [PATH]...
function add_path {
local path_var="$1"
shift
while [ -n "$1" ];do
# http://bashify.com/?Useful_Techniques:Indirect_Variables:Indirect_Assignment
if [[ -z ${!path_var} ]]; then
export ${path_var}="$(make_native_path "$1")"
else
export ${path_var}="${!path_var}${delimiter}$(make_native_path "$1")"
fi
shift
done
}
function download_failed_message {
cat <<-EOS 1>&2
Failed to download $1 (exit code $2)
It's possible your HTTP client's certificate store does not have the
correct certificate authority needed. This is often caused by an
out-of-date version of libssl. It's also possible that you're behind a
firewall and haven't set HTTP_PROXY and HTTPS_PROXY.
EOS
}
function checksum_failed_message {
cat <<-EOS 1>&2
Failed to properly download $1
The checksum was mismatched. and we could not verify the downloaded
file. We expected a sha256 of
$2 and actually had
$3.
We used '$SHASUM_CMD' to verify the downloaded file.
EOS
}
function self_install {
if [ -r "$LEIN_JAR" ]; then
cat <<-EOS 1>&2
The self-install jar already exists at $LEIN_JAR.
If you wish to re-download, delete it and rerun "$0 self-install".
EOS
exit 1
fi
msg "Downloading Leiningen to $LEIN_JAR now..."
mkdir -p "$(dirname "$LEIN_JAR")"
LEIN_URL="https://github.com/technomancy/leiningen/releases/download/$LEIN_VERSION/leiningen-$LEIN_VERSION-standalone.zip"
$HTTP_CLIENT "$LEIN_JAR.pending" "$LEIN_URL"
local exit_code=$?
if [ $exit_code == 0 ]; then
printf "$LEIN_CHECKSUM $LEIN_JAR.pending\n" > "$LEIN_JAR.pending.shasum"
$SHASUM_CMD -c "$LEIN_JAR.pending.shasum"
if [ $? == 0 ]; then
mv -f "$LEIN_JAR.pending" "$LEIN_JAR"
else
got_sum="$($SHASUM_CMD "$LEIN_JAR.pending" | cut -f 1 -d ' ')"
checksum_failed_message "$LEIN_URL" "$LEIN_CHECKSUM" "$got_sum"
rm "$LEIN_JAR.pending" 2> /dev/null
exit 1
fi
else
rm "$LEIN_JAR.pending" 2> /dev/null
download_failed_message "$LEIN_URL" "$exit_code"
exit 1
fi
}
NOT_FOUND=1
ORIGINAL_PWD="$PWD"
while [ ! -r "$PWD/project.clj" ] && [ "$PWD" != "/" ] && [ $NOT_FOUND -ne 0 ]
do
cd ..
if [ "$(dirname "$PWD")" = "/" ]; then
NOT_FOUND=0
cd "$ORIGINAL_PWD"
fi
done
export LEIN_HOME="${LEIN_HOME:-"$HOME/.lein"}"
for f in "/etc/leinrc" "$LEIN_HOME/leinrc" ".leinrc"; do
if [ -e "$f" ]; then
source "$f"
fi
done
if $cygwin; then
export LEIN_HOME=$(cygpath -w "$LEIN_HOME")
fi
LEIN_JAR="$LEIN_HOME/self-installs/leiningen-$LEIN_VERSION-standalone.jar"
# normalize $0 on certain BSDs
if [ "$(dirname "$0")" = "." ]; then
SCRIPT="$(which "$(basename "$0")")"
if [ -z "$SCRIPT" ]; then
SCRIPT="$0"
fi
else
SCRIPT="$0"
fi
# resolve symlinks to the script itself portably
while [ -h "$SCRIPT" ] ; do
ls=$(ls -ld "$SCRIPT")
link=$(expr "$ls" : '.*-> \(.*\)$')
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT="$(dirname "$SCRIPT"$)/$link"
fi
done
BIN_DIR="$(dirname "$SCRIPT")"
export LEIN_JVM_OPTS="${LEIN_JVM_OPTS-"-Xverify:none -XX:+TieredCompilation -XX:TieredStopAtLevel=1"}"
# This needs to be defined before we call HTTP_CLIENT below
if [ "$HTTP_CLIENT" = "" ]; then
if type -p curl >/dev/null 2>&1; then
if [ "$https_proxy" != "" ]; then
CURL_PROXY="-x $https_proxy"
fi
HTTP_CLIENT="curl $CURL_PROXY -f -L -o"
else
HTTP_CLIENT="wget -O"
fi
fi
# This needs to be defined before we call SHASUM_CMD below
if [ "$SHASUM_CMD" = "" ]; then
if type -p sha256sum >/dev/null 2>&1; then
export SHASUM_CMD="sha256sum"
elif type -p shasum >/dev/null 2>&1; then
export SHASUM_CMD="shasum --algorithm 256"
elif type -p sha256 >/dev/null 2>&1; then
export SHASUM_CMD="sha256 -q"
else
command_not_found sha256sum
fi
fi
# When :eval-in :classloader we need more memory
grep -E -q '^\s*:eval-in\s+:classloader\s*$' project.clj 2> /dev/null && \
export LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Xms64m -Xmx512m"
if [ -r "$BIN_DIR/../src/leiningen/version.clj" ]; then
# Running from source checkout
LEIN_DIR="$(cd $(dirname "$BIN_DIR");pwd -P)"
# Need to use lein release to bootstrap the leiningen-core library (for aether)
if [ ! -r "$LEIN_DIR/leiningen-core/.lein-bootstrap" ]; then
cat <<-'EOS' 1>&2
Leiningen is missing its dependencies.
Please run "lein bootstrap" in the leiningen-core/ directory
with a stable release of Leiningen. See CONTRIBUTING.md for details.
EOS
exit 1
fi
# If project.clj for lein or leiningen-core changes, we must recalculate
LAST_PROJECT_CHECKSUM=$(cat "$LEIN_DIR/.lein-project-checksum" 2> /dev/null)
PROJECT_CHECKSUM=$(sum "$LEIN_DIR/project.clj" "$LEIN_DIR/leiningen-core/project.clj")
if [ "$PROJECT_CHECKSUM" != "$LAST_PROJECT_CHECKSUM" ]; then
if [ -r "$LEIN_DIR/.lein-classpath" ]; then
rm "$LEIN_DIR/.lein-classpath"
fi
fi
# Use bin/lein to calculate its own classpath.
if [ ! -r "$LEIN_DIR/.lein-classpath" ] && [ "$1" != "classpath" ]; then
msg "Recalculating Leiningen's classpath."
cd "$LEIN_DIR"
LEIN_NO_USER_PROFILES=1 "$LEIN_DIR/bin/lein" classpath .lein-classpath
sum "$LEIN_DIR/project.clj" "$LEIN_DIR/leiningen-core/project.clj" > \
.lein-project-checksum
cd -
fi
mkdir -p "$LEIN_DIR/target/classes"
export LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Dclojure.compile.path=$LEIN_DIR/target/classes"
add_path CLASSPATH "$LEIN_DIR/leiningen-core/src/" "$LEIN_DIR/leiningen-core/resources/" \
"$LEIN_DIR/test:$LEIN_DIR/target/classes" "$LEIN_DIR/src" ":$LEIN_DIR/resources"
if [ -r "$LEIN_DIR/.lein-classpath" ]; then
add_path CLASSPATH "$(cat "$LEIN_DIR/.lein-classpath" 2> /dev/null)"
else
add_path CLASSPATH "$(cat "$LEIN_DIR/leiningen-core/.lein-bootstrap" 2> /dev/null)"
fi
else # Not running from a checkout
add_path CLASSPATH "$LEIN_JAR"
if [ "$LEIN_USE_BOOTCLASSPATH" != "no" ]; then
LEIN_JVM_OPTS="-Xbootclasspath/a:$LEIN_JAR $LEIN_JVM_OPTS"
fi
if [ ! -r "$LEIN_JAR" -a "$1" != "self-install" ]; then
self_install
fi
fi
if [ ! -x "$JAVA_CMD" ] && ! type -f java >/dev/null
then
msg "Leiningen couldn't find 'java' executable, which is required."
msg "Please either set JAVA_CMD or put java (>=1.6) in your \$PATH ($PATH)."
exit 1
fi
export LEIN_JAVA_CMD="${LEIN_JAVA_CMD:-${JAVA_CMD:-java}}"
if [[ -z "${DRIP_INIT+x}" && "$(basename "$LEIN_JAVA_CMD")" == *drip* ]]; then
export DRIP_INIT="$(printf -- '-e\n(require (quote leiningen.repl))')"
export DRIP_INIT_CLASS="clojure.main"
fi
# Support $JAVA_OPTS for backwards-compatibility.
export JVM_OPTS="${JVM_OPTS:-"$JAVA_OPTS"}"
# Handle jline issue with cygwin not propagating OSTYPE through java subprocesses: https://github.com/jline/jline2/issues/62
cygterm=false
if $cygwin; then
case "$TERM" in
rxvt* | xterm* | vt*) cygterm=true ;;
esac
fi
if $cygterm; then
LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Djline.terminal=jline.UnixTerminal"
stty -icanon min 1 -echo > /dev/null 2>&1
fi
# TODO: investigate http://skife.org/java/unix/2011/06/20/really_executable_jars.html
# If you're packaging this for a package manager (.deb, homebrew, etc)
# you need to remove the self-install and upgrade functionality or see lein-pkg.
if [ "$1" = "self-install" ]; then
if [ -r "$BIN_DIR/../src/leiningen/version.clj" ]; then
cat <<-'EOS' 1>&2
Running self-install from a checkout is not supported.
See CONTRIBUTING.md for SNAPSHOT-specific build instructions.
EOS
exit 1
fi
msg "Manual self-install is deprecated; it will run automatically when necessary."
self_install
elif [ "$1" = "upgrade" ] || [ "$1" = "downgrade" ]; then
if [ "$LEIN_DIR" != "" ]; then
msg "The upgrade task is not meant to be run from a checkout."
exit 1
fi
if [ $SNAPSHOT = "YES" ]; then
cat <<-'EOS' 1>&2
The upgrade task is only meant for stable releases.
See the "Bootstrapping" section of CONTRIBUTING.md.
EOS
exit 1
fi
if [ ! -w "$SCRIPT" ]; then
msg "You do not have permission to upgrade the installation in $SCRIPT"
exit 1
else
TARGET_VERSION="${2:-stable}"
echo "The script at $SCRIPT will be upgraded to the latest $TARGET_VERSION version."
echo -n "Do you want to continue [Y/n]? "
read RESP
case "$RESP" in
y|Y|"")
echo
msg "Upgrading..."
TARGET="/tmp/lein-${$}-upgrade"
if $cygwin; then
TARGET=$(cygpath -w "$TARGET")
fi
LEIN_SCRIPT_URL="https://github.com/technomancy/leiningen/raw/$TARGET_VERSION/bin/lein"
$HTTP_CLIENT "$TARGET" "$LEIN_SCRIPT_URL"
if [ $? == 0 ]; then
cmp -s "$TARGET" "$SCRIPT"
if [ $? == 0 ]; then
msg "Leiningen is already up-to-date."
fi
mv "$TARGET" "$SCRIPT" && chmod +x "$SCRIPT"
unset CLASSPATH
exec "$SCRIPT" version
else
download_failed_message "$LEIN_SCRIPT_URL"
fi;;
*)
msg "Aborted."
exit 1;;
esac
fi
else
if $cygwin; then
# When running on Cygwin, use Windows-style paths for java
ORIGINAL_PWD=$(cygpath -w "$ORIGINAL_PWD")
fi
# apply context specific CLASSPATH entries
if [ -f .lein-classpath ]; then
add_path CLASSPATH "$(cat .lein-classpath)"
fi
if [ -n "$DEBUG" ]; then
msg "Leiningen's classpath: $CLASSPATH"
fi
if [ -r .lein-fast-trampoline ]; then
export LEIN_FAST_TRAMPOLINE='y'
fi
if [ "$LEIN_FAST_TRAMPOLINE" != "" ] && [ -r project.clj ]; then
INPUTS="$* $(cat project.clj) $LEIN_VERSION $(test -f "$LEIN_HOME/profiles.clj" && cat "$LEIN_HOME/profiles.clj") $(test -f profiles.clj && cat profiles.clj)"
INPUT_CHECKSUM=$(echo "$INPUTS" | $SHASUM_CMD | cut -f 1 -d " ")
# Just don't change :target-path in project.clj, mkay?
TRAMPOLINE_FILE="target/trampolines/$INPUT_CHECKSUM"
else
if hash mktemp 2>/dev/null; then
# Check if mktemp is available before using it
TRAMPOLINE_FILE="$(mktemp /tmp/lein-trampoline-XXXXXXXXXXXXX)"
else
TRAMPOLINE_FILE="/tmp/lein-trampoline-$$"
fi
trap 'rm -f $TRAMPOLINE_FILE' EXIT
fi
if $cygwin; then
TRAMPOLINE_FILE=$(cygpath -w "$TRAMPOLINE_FILE")
fi
if [ "$INPUT_CHECKSUM" != "" ] && [ -r "$TRAMPOLINE_FILE" ]; then
if [ -n "$DEBUG" ]; then
msg "Fast trampoline with $TRAMPOLINE_FILE."
fi
exec sh -c "exec $(cat "$TRAMPOLINE_FILE")"
else
export TRAMPOLINE_FILE
"$LEIN_JAVA_CMD" \
-Dfile.encoding=UTF-8 \
-Dmaven.wagon.http.ssl.easy=false \
-Dmaven.wagon.rto=10000 \
$LEIN_JVM_OPTS \
-Dleiningen.input-checksum="$INPUT_CHECKSUM" \
-Dleiningen.original.pwd="$ORIGINAL_PWD" \
-Dleiningen.script="$SCRIPT" \
-classpath "$CLASSPATH" \
clojure.main -m leiningen.core.main "$@"
EXIT_CODE=$?
if $cygterm ; then
stty icanon echo > /dev/null 2>&1
fi
if [ -r "$TRAMPOLINE_FILE" ] && [ "$LEIN_TRAMPOLINE_WARMUP" = "" ]; then
TRAMPOLINE="$(cat "$TRAMPOLINE_FILE")"
if [ "$INPUT_CHECKSUM" = "" ]; then # not using fast trampoline
rm "$TRAMPOLINE_FILE"
fi
if [ "$TRAMPOLINE" = "" ]; then
exit $EXIT_CODE
else
exec sh -c "exec $TRAMPOLINE"
fi
else
exit $EXIT_CODE
fi
fi
fi

@ -1,11 +0,0 @@
FROM clojure
RUN apt update
RUN apt -yqq --no-install-recommends --yes install curl default-jre-headless
RUN curl -L -o /tmp/serverspec.jar \
https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/2.0.0/dda-serverspec-standalone.jar
COPY serverspec.edn /tmp/serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v

@ -0,0 +1,57 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

# Image coordinates; with NAME_ONLY naming the image is published as just `name`.
name = "ddadevops"
MODULE = "ddadevops"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"

@init
def initialize(project):
    """Configure the IMAGE build; dev versions get a timestamp-suffixed tag
    so every dev build pushes a unique image."""
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_ONLY",
        "image_tag": f"{image_tag}",
    }
    project.build_depends_on("ddadevops>=4.9.0")
    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()

@task
def image(project):
    # Build the container image.
    build = get_devops_build(project)
    build.image()

@task
def drun(project):
    # Run a container from the image (presumably an interactive debug run — TODO confirm).
    build = get_devops_build(project)
    build.drun()

@task
def test(project):
    # Run the image's tests.
    build = get_devops_build(project)
    build.test()

@task
def publish(project):
    # Log in to and push the image to dockerhub.
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()

@ -0,0 +1,5 @@
# Devops build image: python alpine base provisioned by resources/install.sh.
FROM python:3.10-alpine
# Stage install script + shared shell helpers into /tmp.
ADD resources /tmp
RUN /tmp/install.sh

@ -0,0 +1,19 @@
#!/bin/sh
set -exo pipefail

# Provision the devops build image (alpine): python build tooling + git/curl.
# Output is silenced; `set -e` aborts on any failing command.
# NOTE: /bin/sh on alpine is busybox ash, so the portable POSIX `name()`
# function syntax is used instead of the bash-only `function` keyword.
main() {
  {
    upgradeSystem
    apk add --no-cache python3 py3-pip openssl-dev bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection
    cleanupDocker
  } > /dev/null
}

# Shared helpers (upgradeSystem, cleanupDocker) are staged into /tmp by the image build.
source /tmp/install_functions_alpine.sh
main

@ -1,6 +0,0 @@
FROM docker:latest
RUN set -eux;
RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git;
RUN python3 -m pip install -U pip;
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml;

@ -1,11 +0,0 @@
FROM devops-build
RUN apk update
RUN apk add curl openjdk8
RUN curl -L -o /tmp/serverspec.jar \
https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/2.0.0/dda-serverspec-standalone.jar
COPY serverspec.edn /tmp/serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v

@ -0,0 +1,57 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

# Image coordinates; NAME_AND_MODULE naming publishes the image as "ddadevops-dind".
name = "ddadevops"
MODULE = "dind"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"

@init
def initialize(project):
    """Configure the IMAGE build; dev versions get a timestamp-suffixed tag
    so every dev build pushes a unique image."""
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }
    project.build_depends_on("ddadevops>=4.7.0")
    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()

@task
def image(project):
    # Build the container image.
    build = get_devops_build(project)
    build.image()

@task
def drun(project):
    # Run a container from the image (presumably an interactive debug run — TODO confirm).
    build = get_devops_build(project)
    build.drun()

@task
def test(project):
    # Run the image's tests.
    build = get_devops_build(project)
    build.test()

@task
def publish(project):
    # Log in to and push the image to dockerhub.
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()

@ -0,0 +1,5 @@
# Docker-in-docker build image, provisioned by resources/install.sh.
FROM docker:latest
WORKDIR /tmp
# Stage install script + shared shell helpers; run relative to WORKDIR.
ADD resources ./
RUN ./install.sh

@ -0,0 +1,17 @@
#!/bin/sh
set -exo pipefail

# Provision the docker-in-docker build image: python build tooling on top of
# the (alpine-based) docker:latest image. Output is silenced; `set -e` aborts
# on any failing command.
# NOTE: /bin/sh here is busybox ash, so the portable POSIX `name()` function
# syntax is used instead of the bash-only `function` keyword.
main() {
  {
    upgradeSystem
    apk add --no-cache python3 py3-pip openssl-dev bash git
    pip3 install --break-system-packages pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection
    cleanupDocker
  } > /dev/null
}

# Shared helpers (upgradeSystem, cleanupDocker) are staged into /tmp by the image build.
source /tmp/install_functions_alpine.sh
main

@ -0,0 +1,57 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

# Image coordinates; NAME_AND_MODULE naming publishes the image as "ddadevops-kotlin".
name = "ddadevops"
MODULE = "kotlin"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"

@init
def initialize(project):
    """Configure the IMAGE build; dev versions get a timestamp-suffixed tag
    so every dev build pushes a unique image."""
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }
    project.build_depends_on("ddadevops>=4.0.0")
    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()

@task
def image(project):
    # Build the container image.
    build = get_devops_build(project)
    build.image()

@task
def drun(project):
    # Run a container from the image (presumably an interactive debug run — TODO confirm).
    build = get_devops_build(project)
    build.drun()

@task
def test(project):
    # Run the image's tests.
    build = get_devops_build(project)
    build.test()

@task
def publish(project):
    # Log in to and push the image to dockerhub.
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()

@ -0,0 +1,4 @@
# Kotlin build image: Debian slim provisioned by resources/install.sh.
FROM debian:stable-slim
# Stage install script + shared shell helpers into /tmp.
ADD resources /tmp
RUN /tmp/install.sh

@ -0,0 +1,17 @@
#!/bin/bash
set -exo pipefail

# Provision the kotlin build image: kotlin/gradle toolchain plus the
# python/pybuilder tooling. Output is silenced; `set -e` aborts on failure.
function main() {
  {
    upgradeSystem
    apt-get -qqy install curl git kotlin gradle iputils-ping ssh python3 python3-pip
    pip3 install --break-system-packages pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection
    cleanupDocker
  } > /dev/null
}

# Shared helpers (upgradeSystem, cleanupDocker) are staged into /tmp by the image build.
source /tmp/install_functions_debian.sh
# Run apt non-interactively so no prompt can stall the docker build.
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

@ -1,14 +1,19 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
name = "devops-build"
MODULE = "image"
name = "ddadevops"
MODULE = "python"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"
@init
def initialize(project):
image_tag = version
if "dev" in image_tag:
image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
input = {
"name": name,
@ -17,10 +22,12 @@ def initialize(project):
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
"image_naming": "NAME_AND_MODULE",
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.0.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()

@ -0,0 +1,4 @@
# Python build image: alpine base provisioned by resources/install.sh.
FROM python:3.10-alpine
# Stage install script + shared shell helpers into /tmp.
ADD resources /tmp
RUN /tmp/install.sh

@ -0,0 +1,20 @@
#!/bin/sh
set -exo pipefail

# Provision the python build image: build toolchain plus linters and test
# dependencies. Output is silenced; `set -e` aborts on any failing command.
# NOTE: /bin/sh on alpine is busybox ash, so the portable POSIX `name()`
# function syntax is used instead of the bash-only `function` keyword.
main() {
  {
    upgradeSystem
    apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection \
        coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML
    cleanupDocker
  } > /dev/null
}

# Shared helpers (upgradeSystem, cleanupDocker) are staged into /tmp by the image build.
source /tmp/install_functions_alpine.sh
main

@ -4,7 +4,9 @@ from ..infrastructure import FileApi, ResourceApi, ImageApi
class ImageBuildService:
def __init__(self, file_api: FileApi, resource_api: ResourceApi, image_api: ImageApi):
def __init__(
self, file_api: FileApi, resource_api: ResourceApi, image_api: ImageApi
):
self.file_api = file_api
self.resource_api = resource_api
self.image_api = image_api
@ -18,7 +20,9 @@ class ImageBuildService:
)
def __copy_build_resource_file_from_package__(self, resource_name, devops: Devops):
data = self.resource_api.read_resource(f"src/main/resources/docker/{resource_name}")
data = self.resource_api.read_resource(
f"src/main/resources/docker/{resource_name}"
)
self.file_api.write_data_to_file(
Path(f"{devops.build_path()}/{resource_name}"), data
)
@ -27,12 +31,16 @@ class ImageBuildService:
self.__copy_build_resource_file_from_package__(
"image/resources/install_functions.sh", devops
)
self.__copy_build_resource_file_from_package__(
"image/resources/install_functions_debian.sh", devops
)
self.__copy_build_resource_file_from_package__(
"image/resources/install_functions_alpine.sh", devops
)
def __copy_build_resources_from_dir__(self, devops: Devops):
image = devops.specialized_builds[BuildType.IMAGE]
self.file_api.cp_force(
image.build_commons_path(), devops.build_path()
)
self.file_api.cp_force(image.build_commons_path(), devops.build_path())
def initialize_build_dir(self, devops: Devops):
image = devops.specialized_builds[BuildType.IMAGE]
@ -43,13 +51,18 @@ class ImageBuildService:
else:
self.__copy_build_resources_from_dir__(devops)
self.file_api.cp_recursive("image", build_path)
self.file_api.cp_recursive("test", build_path)
try:
self.file_api.cp_recursive("test", build_path)
except:
print("Folder 'test' not found")
def image(self, devops: Devops):
self.image_api.image(devops.name, devops.build_path())
image = devops.specialized_builds[BuildType.IMAGE]
self.image_api.image(image.image_name(), devops.build_path())
def drun(self, devops: Devops):
self.image_api.drun(devops.name)
image = devops.specialized_builds[BuildType.IMAGE]
self.image_api.drun(image.image_name())
def dockerhub_login(self, devops: Devops):
image = devops.specialized_builds[BuildType.IMAGE]
@ -59,9 +72,14 @@ class ImageBuildService:
def dockerhub_publish(self, devops: Devops):
image = devops.specialized_builds[BuildType.IMAGE]
if image.image_tag is not None:
self.image_api.dockerhub_publish(
image.image_name(), image.image_dockerhub_user, image.image_tag
)
self.image_api.dockerhub_publish(
devops.name, image.image_dockerhub_user, image.image_tag
image.image_name(), image.image_dockerhub_user, 'latest'
)
def test(self, devops: Devops):
self.image_api.test(devops.name, devops.build_path())
image = devops.specialized_builds[BuildType.IMAGE]
self.image_api.test(image.image_name(), devops.build_path())

@ -1,18 +1,26 @@
import json
from typing import List
from pathlib import Path
from ..infrastructure import GitApi, BuildFileRepository
from ..domain import Version, Release, ReleaseType
from ..infrastructure import GitApi, ArtifactDeploymentApi, BuildFileRepository
from ..domain import Version, Release, ReleaseType, Artifact
class ReleaseService:
def __init__(self, git_api: GitApi, build_file_repository: BuildFileRepository):
def __init__(
self,
git_api: GitApi,
artifact_deployment_api: ArtifactDeploymentApi,
build_file_repository: BuildFileRepository,
):
self.git_api = git_api
self.artifact_deployment_api = artifact_deployment_api
self.build_file_repository = build_file_repository
@classmethod
def prod(cls, base_dir: str):
return cls(
GitApi(),
ArtifactDeploymentApi(),
BuildFileRepository(base_dir),
)
@ -53,6 +61,41 @@ class ReleaseService:
)
self.git_api.push_follow_tags()
def publish_artifacts(self, release: Release):
    """Create a Forgejo release for release.version and upload every artifact
    plus its sha256/sha512 checksum file as release assets.

    Raises RuntimeError (via __parse_forgejo_release_id__) when the
    release-creation response carries no id.
    """
    token = str(release.release_artifact_token)
    release_id = self.__parse_forgejo_release_id__(
        self.artifact_deployment_api.create_forgejo_release(
            release.forgejo_release_api_endpoint(),
            release.version.to_string(),
            token,
        )
    )
    # Each artifact is accompanied by its checksum files; calculate_sha*
    # presumably returns the path of the generated checksum file — TODO confirm.
    artifacts_sums = []
    for artifact in release.release_artifacts:
        sha256 = self.artifact_deployment_api.calculate_sha256(artifact.path())
        sha512 = self.artifact_deployment_api.calculate_sha512(artifact.path())
        artifacts_sums += [Artifact(sha256), Artifact(sha512)]
    # Fixed: removed leftover debug prints — one of them was `print(str)`,
    # which printed the builtin str class instead of the artifact.
    for artifact in release.release_artifacts + artifacts_sums:
        self.artifact_deployment_api.add_asset_to_release(
            release.forgejo_release_asset_api_endpoint(release_id),
            artifact.path(),
            artifact.type(),
            token,
        )
def __parse_forgejo_release_id__(self, release_response: str) -> int:
    """Extract the numeric release id from a Forgejo create-release response.

    Raises RuntimeError carrying the full parsed payload when no "id" field
    is present (e.g. the API returned an error document). The previous bare
    `except:` also swallowed unrelated errors such as KeyboardInterrupt;
    narrowed to KeyError and chained for a useful traceback.
    """
    parsed = json.loads(release_response)
    try:
        return parsed["id"]
    except KeyError as err:
        raise RuntimeError(str(parsed)) from err
def __set_version_and_commit__(
self, version: Version, build_file_ids: List[str], message: str
):

@ -17,6 +17,7 @@ from .provider_hetzner import Hetzner
from .provider_aws import Aws
from .provs_k3s import K3s
from .release import Release
from .artifact import Artifact
from .credentials import Credentials, CredentialMapping, GopassType
from .version import Version
from .build_file import BuildFileType, BuildFile

@ -0,0 +1,46 @@
from enum import Enum
from pathlib import Path
from .common import (
Validateable,
)
class ArtifactType(Enum):
    # Classification of release artifacts; not referenced by Artifact.type(),
    # which derives the MIME type from the file suffix instead.
    TEXT = 0
    JAR = 1
class Artifact(Validateable):
def __init__(self, path: str):
self.path_str = path
def path(self) -> Path:
return Path(self.path_str)
def type(self) -> str:
suffix = self.path().suffix
match suffix:
case ".jar":
return "application/x-java-archive"
case ".js":
return "application/x-javascript"
case _:
return "text/plain"
def validate(self):
result = []
result += self.__validate_is_not_empty__("path_str")
try:
Path(self.path_str)
except Exception as e:
result += [f"path was not a valid: {e}"]
return result
def __str__(self):
return str(self.path())
def __eq__(self, other):
return other and self.__str__() == other.__str__()
def __hash__(self) -> int:
return self.__str__().__hash__()

@ -45,38 +45,27 @@ class BuildFile(Validateable):
result = None
return result
def __get_file_type_regex_str(self, file_type: BuildFileType):
match file_type:
case BuildFileType.JAVA_GRADLE:
return r'(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\"'
case BuildFileType.PYTHON:
return r'(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT|-dev\d*)?)\"'
case BuildFileType.JAVA_CLOJURE:
return r'(?P<pre_version>\(defproject\s(\S)*\s)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\"'
case _:
return ""
def get_version(self) -> Version:
try:
match self.build_file_type():
build_file_type = self.build_file_type()
match build_file_type:
case BuildFileType.JS:
version_str = json.loads(self.content)["version"]
case BuildFileType.JAVA_GRADLE:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
case BuildFileType.PYTHON:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*",
version_line_group,
)
version_str = version_string.group()
case BuildFileType.JAVA_CLOJURE:
# TODO: unsure about the trailing '\n' !
version_line = re.search("\\(defproject .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
case BuildFileType.JAVA_GRADLE | BuildFileType.PYTHON | BuildFileType.JAVA_CLOJURE:
version_str = re.search(self.__get_file_type_regex_str(build_file_type), self.content).group("version")
except:
raise Exception(f"Version not found in file {self.file_path}")
raise RuntimeError(f"Version not found in file {self.file_path}")
result = Version.from_str(version_str, self.get_default_suffix())
result.throw_if_invalid()
@ -84,38 +73,27 @@ class BuildFile(Validateable):
return result
def set_version(self, new_version: Version):
# TODO: How can we create regex-pattern constants to use them at both places?
if new_version.is_snapshot():
new_version.snapshot_suffix = self.get_default_suffix()
try:
match self.build_file_type():
build_file_type = self.build_file_type()
match build_file_type:
case BuildFileType.JS:
json_data = json.loads(self.content)
json_data["version"] = new_version.to_string()
self.content = json.dumps(json_data, indent=4)
case BuildFileType.JAVA_GRADLE:
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.PYTHON:
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.JAVA_CLOJURE:
# TODO: we should stick here on defproject instead of first line!
case BuildFileType.JAVA_GRADLE | BuildFileType.PYTHON | BuildFileType.JAVA_CLOJURE:
substitute = re.sub(
'"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'"{new_version.to_string()}"',
self.__get_file_type_regex_str(build_file_type),
fr'\g<pre_version>"{new_version.to_string()}"',
self.content,
1,
)
self.content = substitute
except:
raise Exception(f"Version not found in file {self.file_path}")
raise RuntimeError(f"Version not found in file {self.file_path}")
def get_default_suffix(self) -> str:
result = "SNAPSHOT"

@ -1,3 +1,4 @@
from enum import Enum
from typing import List, Dict
from .common import (
filter_none,
@ -5,15 +6,23 @@ from .common import (
)
class NamingType(Enum):
NAME_ONLY = 1
NAME_AND_MODULE = 2
class Image(Validateable):
def __init__(
self,
inp: dict,
):
self.module = inp.get("module")
self.name = inp.get("name")
self.image_dockerhub_user = inp.get("image_dockerhub_user")
self.image_dockerhub_password = inp.get("image_dockerhub_password")
self.image_tag = inp.get("image_tag")
self.image_build_commons_path = inp.get("image_build_commons_path")
self.image_naming = NamingType[inp.get("image_naming", "NAME_ONLY")]
self.image_use_package_common_files = inp.get(
"image_use_package_common_files", True
)
@ -23,8 +32,10 @@ class Image(Validateable):
def validate(self) -> List[str]:
result = []
result += self.__validate_is_not_empty__("name")
result += self.__validate_is_not_empty__("image_dockerhub_user")
result += self.__validate_is_not_empty__("image_dockerhub_password")
result += self.__validate_is_not_empty__("image_naming")
if not self.image_use_package_common_files:
result += self.__validate_is_not_empty__("image_build_commons_path")
result += self.__validate_is_not_empty__("image_build_commons_dir_name")
@ -37,6 +48,16 @@ class Image(Validateable):
]
return "/".join(filter_none(commons_path)) + "/"
def image_name(self) -> str:
    """Compute the image name: the base name, optionally suffixed with the module.

    The module is appended (dash-separated) only for NAME_AND_MODULE naming
    and only when a non-empty module is configured.
    """
    parts: List[str] = [self.name]  # type: ignore
    append_module = (
        self.image_naming == NamingType.NAME_AND_MODULE and bool(self.module)
    )
    if append_module:
        parts.append(self.module)
    return "-".join(parts)
@classmethod
def get_mapping_default(cls) -> List[Dict[str, str]]:
return [

@ -8,7 +8,7 @@ from .provider_digitalocean import Digitalocean
from .provider_hetzner import Hetzner
from .c4k import C4k
from .image import Image
from .release import ReleaseType
from .release import ReleaseType, Release
from ..infrastructure import BuildFileRepository, CredentialsApi, EnvironmentApi, GitApi
@ -69,6 +69,7 @@ class InitService:
Path(primary_build_file_id)
)
version = primary_build_file.get_version()
default_mappings += Release.get_mapping_default()
credentials = Credentials(inp, default_mappings)
authorization = self.authorization(credentials)
@ -111,9 +112,8 @@ class InitService:
result = {}
for name in credentials.mappings.keys():
mapping = credentials.mappings[name]
env_value = self.environment_api.get(mapping.name_for_environment())
if env_value:
result[name] = env_value
if self.environment_api.is_defined(mapping.name_for_environment()):
result[name] = self.environment_api.get(mapping.name_for_environment())
else:
if mapping.gopass_type() == GopassType.FIELD:
result[name] = self.credentials_api.gopass_field_from_path(

@ -36,6 +36,8 @@ class Aws(Validateable, CredentialMappingDefault):
result = {}
if self.aws_as_backend:
result = {
"access_key": self.aws_access_key,
"secret_key": self.aws_secret_key,
"bucket": self.aws_bucket,
"key": self.__bucket_key__(),
"region": self.aws_region,

@ -20,6 +20,14 @@ CONFIG_CERTMANAGER = """certmanager:
"""
CONFIG_ECHO = """echo: $echo
"""
CONFIG_HETZNER_CSI = """hetzner:
hcloudApiToken:
source: "PLAIN" # PLAIN, GOPASS or PROMPT
parameter: $hcloud_api # the api key for the hetzner cloud
encryptionPassphrase:
source: "PLAIN" # PLAIN, GOPASS or PROMPT
parameter: $encryption # the encryption passphrase for created volumes
"""
class K3s(Validateable):
@ -28,8 +36,11 @@ class K3s(Validateable):
self.k3s_letsencrypt_email = inp.get("k3s_letsencrypt_email")
self.k3s_letsencrypt_endpoint = inp.get("k3s_letsencrypt_endpoint", "staging")
self.k3s_app_filename_to_provision = inp.get("k3s_app_filename_to_provision")
self.k3s_enable_echo = inp.get("k3s_enable_echo", "false")
self.k3s_enable_echo = inp.get("k3s_enable_echo", None)
self.k3s_provs_template = inp.get("k3s_provs_template", None)
self.k3s_enable_hetzner_csi = inp.get("k3s_enable_hetzner_csi", False)
self.k3s_hetzner_api_token = inp.get("k3s_hetzner_api_token", None)
self.k3s_hetzner_encryption_passphrase = inp.get("k3s_hetzner_encryption_passphrase", None)
self.provision_dns: Optional[DnsRecord] = None
def validate(self) -> List[str]:
@ -37,6 +48,9 @@ class K3s(Validateable):
result += self.__validate_is_not_empty__("k3s_letsencrypt_email")
result += self.__validate_is_not_empty__("k3s_letsencrypt_endpoint")
result += self.__validate_is_not_empty__("k3s_app_filename_to_provision")
if self.k3s_enable_hetzner_csi:
result += self.__validate_is_not_empty__("k3s_hetzner_api_token")
result += self.__validate_is_not_empty__("k3s_hetzner_encryption_passphrase")
if self.provision_dns:
result += self.provision_dns.validate()
return result
@ -61,6 +75,9 @@ class K3s(Validateable):
substitutes["letsencrypt_endpoint"] = self.k3s_letsencrypt_endpoint
if self.k3s_enable_echo is not None:
substitutes["echo"] = self.k3s_enable_echo
if self.k3s_enable_hetzner_csi:
substitutes["hcloud_api"] = self.k3s_hetzner_api_token
substitutes["encryption"] = self.k3s_hetzner_encryption_passphrase
return self.__config_template__().substitute(substitutes)
def command(self, devops: Devops):
@ -89,4 +106,6 @@ class K3s(Validateable):
template_text += CONFIG_IPV4
if self.provision_dns.ipv6 is not None:
template_text += CONFIG_IPV6
if self.k3s_enable_hetzner_csi:
template_text += CONFIG_HETZNER_CSI
return Template(template_text)

@ -1,4 +1,4 @@
from typing import Optional, List
from typing import Optional, List, Dict
from pathlib import Path
from .common import (
Validateable,
@ -7,6 +7,9 @@ from .common import (
from .version import (
Version,
)
from .artifact import (
Artifact,
)
class Release(Validateable):
@ -21,6 +24,13 @@ class Release(Validateable):
"release_secondary_build_files", []
)
self.version = version
self.release_artifact_server_url = inp.get("release_artifact_server_url")
self.release_organisation = inp.get("release_organisation")
self.release_repository_name = inp.get("release_repository_name")
self.release_artifact_token = inp.get("release_artifact_token")
self.release_artifacts = []
for a in inp.get("release_artifacts", []):
self.release_artifacts.append(Artifact(a))
def update_release_type(self, release_type: ReleaseType):
self.release_type = release_type
@ -53,10 +63,44 @@ class Release(Validateable):
and self.release_type != ReleaseType.NONE
and self.release_main_branch != self.release_current_branch
):
result.append(f"Releases are allowed only on {self.release_main_branch}")
result.append(
f"Releases are allowed only on {self.release_main_branch}"
)
return result
def validate_for_artifact(self):
    """Collect validation errors for the fields required to publish release artifacts."""
    required_fields = [
        "release_artifact_server_url",
        "release_organisation",
        "release_repository_name",
        "release_artifact_token",
    ]
    result = []
    for field_name in required_fields:
        result += self.__validate_is_not_empty__(field_name)
    return result
def build_files(self) -> List[str]:
result = [self.release_primary_build_file]
result += self.release_secondary_build_files
return result
def forgejo_release_api_endpoint(self) -> str:
    """Build the forgejo releases REST endpoint from the artifact configuration.

    Stray leading/trailing slashes in the configured values are tolerated.
    Raises RuntimeError when the artifact configuration is incomplete.
    """
    errors = self.validate_for_artifact()
    if errors:
        raise RuntimeError(f"not valid for creating artifacts: {errors}")

    def _trim(value: str) -> str:
        # normalize values configured with surrounding slashes
        return value.removeprefix("/").removesuffix("/")

    return (
        f"{_trim(self.release_artifact_server_url)}/api/v1/repos/"
        f"{_trim(self.release_organisation)}/"
        f"{_trim(self.release_repository_name)}/releases"
    )
def forgejo_release_asset_api_endpoint(self, release_id: int) -> str:
    """Endpoint for uploading assets to the existing release `release_id`."""
    release_endpoint = self.forgejo_release_api_endpoint()
    return f"{release_endpoint}/{release_id}/assets"
@classmethod
def get_mapping_default(cls) -> List[Dict[str, str]]:
    """Default gopass credential mapping for the release artifact token."""
    token_mapping = {
        "gopass_path": "server/meissa/repo/buero-rw",
        "name": "release_artifact_token",
    }
    return [token_mapping]

@ -32,12 +32,6 @@ class Version(Validateable):
self.snapshot_suffix = snapshot_suffix
self.default_snapshot_suffix = default_snapshot_suffix
def __eq__(self, other):
return other and self.to_string() == other.to_string()
def __hash__(self) -> int:
return self.to_string().__hash__()
def is_snapshot(self):
return self.snapshot_suffix is not None
@ -139,3 +133,9 @@ class Version(Validateable):
snapshot_suffix=None,
version_str=None,
)
def __eq__(self, other):
return other and self.to_string() == other.to_string()
def __hash__(self) -> int:
return self.to_string().__hash__()

@ -7,5 +7,6 @@ from .infrastructure import (
CredentialsApi,
GitApi,
TerraformApi,
ArtifactDeploymentApi,
)
from .repository import DevopsRepository, BuildFileRepository

@ -1,4 +1,4 @@
from subprocess import Popen, PIPE, run
from subprocess import Popen, PIPE, run, CalledProcessError
from pathlib import Path
from sys import stdout
from os import chmod, environ
@ -49,55 +49,32 @@ class FileApi:
class ImageApi:
def __init__(self):
self.execution_api = ExecutionApi()
def image(self, name: str, path: Path):
run(
f"docker build -t {name} --file {path}/image/Dockerfile {path}/image",
shell=True,
check=True,
self.execution_api.execute_live(
f"docker build -t {name} --file {path}/image/Dockerfile {path}/image"
)
def drun(self, name: str):
run(
f'docker run -it --entrypoint="" {name} /bin/bash',
shell=True,
check=True,
self.execution_api.execute_live(
f'docker run -it {name} /bin/bash'
)
def dockerhub_login(self, username: str, password: str):
run(
self.execution_api.execute_secure(
f"docker login --username {username} --password {password}",
shell=True,
check=True,
"docker login --username ***** --password *****",
)
def dockerhub_publish(self, name: str, username: str, tag=None):
if tag is not None:
run(
f"docker tag {name} {username}/{name}:{tag}",
shell=True,
check=True,
)
run(
f"docker push {username}/{name}:{tag}",
shell=True,
check=True,
)
run(
f"docker tag {name} {username}/{name}:latest",
shell=True,
check=True,
)
run(
f"docker push {username}/{name}:latest",
shell=True,
check=True,
)
def dockerhub_publish(self, name: str, username: str, tag: str):
self.execution_api.execute_live(f"docker tag {name} {username}/{name}:{tag}")
self.execution_api.execute_live(f"docker push {username}/{name}:{tag}")
def test(self, name: str, path: Path):
run(
f"docker build -t {name} -test --file {path}/test/Dockerfile {path}/test",
shell=True,
check=True,
self.execution_api.execute_live(
f"docker build -t {name} -test --file {path}/test/Dockerfile {path}/test"
)
@ -107,28 +84,58 @@ class ExecutionApi:
if dry_run:
print(command)
else:
# output = check_output(command, encoding="UTF-8", shell=shell)
output = run(
command, encoding="UTF-8", shell=shell, stdout=PIPE, check=check
).stdout
output = output.rstrip()
try:
output = run(
command,
shell=shell,
check=check,
stdout=PIPE,
stderr=PIPE,
text=True,
).stdout
output = output.rstrip()
except CalledProcessError as exc:
print(
f"Command failed with code: {exc.returncode} and message: {exc.stderr}"
)
raise exc
return output
def execute_live(self, command, dry_run=False, shell=True):
def execute_secure(
    self,
    command: str,
    sanitized_command: str,
    dry_run=False,
    shell=True,
    check=True,
):
    """Run `command`, but on failure re-raise with `sanitized_command` as the
    recorded command line so secrets never leak into tracebacks or logs."""
    try:
        return self.execute(command, dry_run, shell, check)
    except CalledProcessError as exc:
        exc.cmd = sanitized_command  # scrub the secret-bearing command line
        raise
def execute_live(self, command: str, dry_run=False, shell=True):
if dry_run:
print(command)
else:
process = Popen(command, stdout=PIPE, shell=shell)
for line in iter(process.stdout.readline, b""):
print(line.decode("utf-8"), end="")
process.stdout.close()
process.wait()
process = Popen(command, shell=shell)
outs, errs = process.communicate()
while outs is not None:
stdout.buffer.write(outs)
if process.returncode != 0:
raise RuntimeError(f"Execute live '{command}' failed with code {process.returncode}\nerrs: {errs}")
class EnvironmentApi:
    """Thin adapter around the process environment variables."""

    def get(self, key):
        """Return the value of environment variable `key`, or None when unset."""
        return environ.get(key, None)

    def is_defined(self, key):
        """True when `key` exists in the environment, even if set to an empty string."""
        return key in environ
class CredentialsApi:
def __init__(self):
@ -207,3 +214,53 @@ class GitApi:
class TerraformApi:
pass
class ArtifactDeploymentApi:
    """Wrapper around curl for publishing release artifacts to a forgejo instance."""

    def __init__(self):
        self.execution_api = ExecutionApi()

    def create_forgejo_release(self, api_endpoint_url: str, tag: str, token: str):
        """POST a new release for `tag` and return the server response text.

        The real token is only interpolated into the executed command; the
        printed / sanitized variant masks it so it never reaches the logs.
        """
        command = (
            f'curl -X "POST" "{api_endpoint_url}" '
            + ' -H "accept: application/json" -H "Content-Type: application/json"'
            + f' -d \'{{ "body": "Provides files for release {tag}", "tag_name": "{tag}"}}\''
        )  # noqa: E501
        print(command + ' -H "Authorization: token xxxx"')
        return self.execution_api.execute_secure(
            command=command + f' -H "Authorization: token {token}"',
            sanitized_command=command + ' -H "Authorization: token xxxx"',
        )

    def add_asset_to_release(
        self,
        api_endpoint_url: str,
        attachment: Path,
        attachment_type: str,
        token: str,
    ):
        """Upload `attachment` as an asset of an existing release."""
        # NOTE(review): attachment is interpolated into a shell command; paths
        # containing quotes/spaces would break the call — confirm inputs are trusted.
        command = (
            f'curl -X "POST" "{api_endpoint_url}"'
            + ' -H "accept: application/json"'
            + ' -H "Content-Type: multipart/form-data"'
            + f' -F "attachment=@{attachment};type={attachment_type}"'
        )  # noqa: E501
        print(command + ' -H "Authorization: token xxxx"')
        return self.execution_api.execute_secure(
            command=command + f' -H "Authorization: token {token}"',
            sanitized_command=command + ' -H "Authorization: token xxxx"',
        )

    def calculate_sha256(self, path: Path):
        """Write `<path>.sha256` next to the artifact and return its file name."""
        return self.__calculate_shasum__(path, 256)

    def calculate_sha512(self, path: Path):
        """Write `<path>.sha512` next to the artifact and return its file name."""
        return self.__calculate_shasum__(path, 512)

    def __calculate_shasum__(self, path: Path, bits: int):
        # shaNNNsum prints "<hash>  <path>"; redirect it into the sidecar file
        shasum = f"{path}.sha{bits}"
        self.execution_api.execute(
            f"sha{bits}sum {path} > {shasum}",
        )
        return shasum

@ -26,3 +26,8 @@ class ReleaseMixin(DevopsBuild):
devops = self.devops_repo.get_devops(self.project)
release = devops.mixins[MixinType.RELEASE]
self.release_service.tag_bump_and_push_release(release)
def publish_artifacts(self):
    """Resolve the RELEASE mixin of this project and delegate artifact publishing to the release service."""
    devops = self.devops_repo.get_devops(self.project)
    release = devops.mixins[MixinType.RELEASE]
    self.release_service.publish_artifacts(release)

@ -1,8 +1,11 @@
#
#deprecated, we recommend using install_functions_debian.sh instead. We are going to remove install_functions.sh in a future release.
#
function upgradeSystem() {
export DEBIAN_FRONTEND=noninteractive
apt-get update > /dev/null
apt-get -y install apt-utils > /dev/null
apt-get -qqy dist-upgrade > /dev/null
{
apt-get update
apt-get -qqy upgrade
} > /dev/null
}
function cleanupDocker() {

@ -0,0 +1,21 @@
# Upgrade all installed packages from the Alpine repositories (-U refreshes the index first).
function upgradeSystem() {
    apk -U upgrade
}
# Remove ssh authorized_keys, package cache, temp files and logs so the docker image ships clean.
function cleanupDocker() {
    rm -f /root/.ssh/authorized_keys
    rm -f /root/.ssh/authorized_keys2
    apk cache clean
    rm -rf /tmp/*
    find /var/cache -type f -exec rm -rf {} \;
    find /var/log/ -name '*.log' -exec rm -f {} \;
}
# AMI cleanup: drop the ubuntu user's ssh keys, then run the shared docker cleanup.
function cleanupAmi() {
    rm -f /home/ubuntu/.ssh/authorized_keys
    rm -f /home/ubuntu/.ssh/authorized_keys2
    cleanupDocker
}

@ -0,0 +1,25 @@
# Upgrade all installed packages via apt (quiet, assume-yes).
function upgradeSystem() {
    apt-get update
    apt-get -qqy upgrade
}
# Remove ssh authorized_keys, apt caches, temp files and logs so the docker image ships clean.
function cleanupDocker() {
    rm -f /root/.ssh/authorized_keys
    rm -f /root/.ssh/authorized_keys2
    apt-get clean
    apt-get -qqy autoremove --purge
    apt-get -qqy autoclean
    rm -rf /var/lib/apt/lists/
    rm -rf /tmp/*
    find /var/cache -type f -exec rm -rf {} \;
    find /var/log/ -name '*.log' -exec rm -f {} \;
}
# AMI cleanup: drop the ubuntu user's ssh keys, then run the shared docker cleanup.
function cleanupAmi() {
    rm -f /home/ubuntu/.ssh/authorized_keys
    rm -f /home/ubuntu/.ssh/authorized_keys2
    cleanupDocker
}

@ -1,18 +1,22 @@
import pytest
from pathlib import Path
from src.main.python.ddadevops.domain import (
ReleaseType,
ReleaseType,
MixinType,
)
from src.test.python.domain.helper import (
BuildFileRepositoryMock,
GitApiMock,
ArtifactDeploymentApiMock,
build_devops,
)
from src.main.python.ddadevops.application import ReleaseService
def test_sould_update_release_type():
sut = ReleaseService(GitApiMock(), BuildFileRepositoryMock("build.py"))
def test_should_update_release_type():
sut = ReleaseService(
GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepositoryMock("build.py")
)
devops = build_devops({})
release = devops.mixins[MixinType.RELEASE]
sut.update_release_type(release, "MAJOR")
@ -20,3 +24,40 @@ def test_sould_update_release_type():
with pytest.raises(Exception):
sut.update_release_type(release, "NOT_EXISTING")
def test_should_publish_artifacts():
    # the mocked forgejo answers release creation with JSON carrying the new release id
    mock = ArtifactDeploymentApiMock(release='{"id": 2345}')
    sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
    devops = build_devops(
        {
            "release_artifacts": ["target/art"],
            "release_artifact_server_url": "http://repo.test/",
            "release_organisation": "orga",
            "release_repository_name": "repo",
        }
    )
    release = devops.mixins[MixinType.RELEASE]
    sut.publish_artifacts(release)
    # the asset upload must target the release id parsed from the creation response
    assert "http://repo.test/api/v1/repos/orga/repo/releases/2345/assets" == mock.add_asset_to_release_api_endpoint
def test_should_throw_exception_if_there_was_an_error_in_publish_artifacts():
    devops = build_devops(
        {
            "release_artifacts": ["target/art"],
            "release_artifact_server_url": "http://repo.test/",
            "release_organisation": "orga",
            "release_repository_name": "repo",
        }
    )
    release = devops.mixins[MixinType.RELEASE]
    # empty response body: no release id can be parsed
    with pytest.raises(Exception):
        mock = ArtifactDeploymentApiMock(release='')
        sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
        sut.publish_artifacts(release)
    # forgejo error payload instead of a release id
    with pytest.raises(Exception):
        mock = ArtifactDeploymentApiMock(release='{"message": "there was an error", "url":"some-url"}')
        sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
        sut.publish_artifacts(release)

@ -53,6 +53,11 @@ def devops_config(overrides: dict) -> dict:
"release_current_branch": "my_feature",
"release_primary_build_file": "./package.json",
"release_secondary_build_file": [],
"release_artifacts": [],
"release_artifact_token": "release_artifact_token",
"release_artifact_server_url": None,
"release_organisation": None,
"release_repository_name": None,
"credentials_mappings": [
{
"gopass_path": "a/path",
@ -99,6 +104,9 @@ class EnvironmentApiMock:
def get(self, key):
return self.mappings.get(key, None)
def is_defined(self, key):
return key in self.mappings
class CredentialsApiMock:
def __init__(self, mappings):
@ -148,5 +156,33 @@ class GitApiMock:
def push(self):
pass
def push_follow_tags(self):
pass
def checkout(self, branch: str):
pass
class ArtifactDeploymentApiMock:
    """Test double for ArtifactDeploymentApi: records calls, returns canned data."""

    def __init__(self, release=""):
        # canned response returned by create_forgejo_release
        self.release = release
        # call recording for assertions
        self.create_forgejo_release_count = 0
        self.add_asset_to_release_count = 0
        self.add_asset_to_release_api_endpoint = ""

    def create_forgejo_release(self, api_endpoint: str, tag: str, token: str):
        self.create_forgejo_release_count += 1
        return self.release

    def add_asset_to_release(
        self, api_endpoint: str, attachment: str, attachment_type: str, token: str
    ):
        self.add_asset_to_release_count += 1
        self.add_asset_to_release_api_endpoint = api_endpoint

    def calculate_sha256(self, path: Path):
        return f"{path}.sha256"

    def calculate_sha512(self, path: Path):
        return f"{path}.sha512"

@ -0,0 +1,32 @@
import pytest
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
Validateable,
DnsRecord,
Devops,
BuildType,
MixinType,
Artifact,
Image,
)
from .helper import build_devops, devops_config
def test_should_validate_release():
    # NOTE(review): despite the name this exercises Artifact validation — consider renaming
    sut = Artifact("x")
    assert sut.is_valid()
    sut = Artifact(None)
    assert not sut.is_valid()
def test_should_calculate_type():
    # content type is derived from the artifact's file extension
    sut = Artifact("x.jar")
    assert "application/x-java-archive" == sut.type()
    sut = Artifact("x.js")
    assert "application/x-javascript" == sut.type()
    # checksum side-car files are served as plain text
    sut = Artifact("x.jar.sha256")
    assert "text/plain" == sut.type()

@ -7,7 +7,7 @@ from src.main.python.ddadevops.domain import (
)
def test_sould_validate_build_file():
def test_should_validate_build_file():
sut = BuildFile(Path("./project.clj"), "content")
assert sut.is_valid()
@ -18,7 +18,7 @@ def test_sould_validate_build_file():
assert not sut.is_valid()
def test_sould_calculate_build_type():
def test_should_calculate_build_type():
sut = BuildFile(Path("./project.clj"), "content")
assert sut.build_file_type() == BuildFileType.JAVA_CLOJURE
@ -29,7 +29,7 @@ def test_sould_calculate_build_type():
assert sut.build_file_type() == BuildFileType.JS
def test_sould_parse_and_set_js():
def test_should_parse_and_set_js():
sut = BuildFile(
Path("./package.json"),
"""
@ -77,7 +77,7 @@ def test_sould_parse_and_set_js():
)
def test_sould_parse_and_set_version_for_gradle():
def test_should_parse_and_set_version_for_gradle():
sut = BuildFile(
Path("./build.gradle"),
"""
@ -97,7 +97,7 @@ version = "1.1.5-SNAPSHOT"
assert '\nversion = "2.0.0"\n' == sut.content
def test_sould_parse_and_set_version_for_py():
def test_should_parse_and_set_version_for_py():
sut = BuildFile(
Path("./build.py"),
"""
@ -143,7 +143,7 @@ version = "1.1.5-SNAPSHOT"
assert '\nversion = "2.0.0"\n' == sut.content
def test_sould_parse_and_set_version_for_clj():
def test_should_parse_and_set_version_for_clj():
sut = BuildFile(
Path("./project.clj"),
"""
@ -182,3 +182,43 @@ def test_sould_parse_and_set_version_for_clj():
'\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n:dependencies [[org.clojure/clojure "1.11.0"]]\n)\n '
== sut.content
)
def test_should_throw_for_clj_wrong_version():
    # "-Snapshot" (wrong case) must not match the version pattern
    sut = BuildFile(
        Path("./project.clj"),
        """
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-Snapshot"
:description "jira c4k-installation package"
:url "https://domaindrivenarchitecture.org"
)
""",
    )
    with pytest.raises(RuntimeError):
        sut.get_version()
def test_should_ignore_first_version_for_py():
    # only the bare `version = ...` assignment counts; project_version must be skipped
    sut = BuildFile(
        Path("./build.py"),
        """
from pybuilder.core import init, use_plugin, Author
use_plugin("python.core")
name = "ddadevops"
project_version = "0.0.2-dev1"
version = "1.1.5-dev12"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
""",
    )
    assert sut.get_version() == Version.from_str("1.1.5-dev12", "dev")
def test_should_ignore_first_version_for_gradle():
    # kotlin_version must not shadow the real version assignment
    sut = BuildFile(
        Path("./build.gradle"),
        """
kotlin_version = "3.3.3"
version = "1.1.5-SNAPSHOT"
""",
    )
    assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")

@ -4,10 +4,11 @@ from src.main.python.ddadevops.domain import (
Version,
BuildType,
MixinType,
Artifact,
)
def test_devops_factory():
def test_devops_creation():
with pytest.raises(Exception):
DevopsFactory().build_devops({"build_types": ["NOTEXISTING"]})
@ -50,6 +51,7 @@ def test_devops_factory():
assert sut is not None
assert sut.specialized_builds[BuildType.C4K] is not None
def test_release_devops_creation():
sut = DevopsFactory().build_devops(
{
"stage": "test",
@ -66,3 +68,32 @@ def test_devops_factory():
)
assert sut is not None
assert sut.mixins[MixinType.RELEASE] is not None
sut = DevopsFactory().build_devops(
{
"stage": "test",
"name": "mybuild",
"module": "test_image",
"project_root_path": "../../..",
"build_types": [],
"mixin_types": ["RELEASE"],
"release_main_branch": "main",
"release_current_branch": "my_feature",
"release_config_file": "project.clj",
"release_artifacts": ["x.jar"],
"release_artifact_token": "y",
"release_artifact_server_url": "https://repo.prod.meissa.de",
"release_organisation": "meissa",
"release_repository_name": "provs",
},
Version.from_str("1.0.0", "SNAPSHOT"),
)
release = sut.mixins[MixinType.RELEASE]
assert release is not None
assert Artifact("x.jar") == release.release_artifacts[0]
def test_on_merge_input_should_win():
    # explicit input takes precedence over values coming from the context
    sut = DevopsFactory()
    assert {'tag': 'inp'} == sut.merge(inp = {'tag': 'inp'}, context = {'tag': 'context'}, authorization={})

@ -12,3 +12,16 @@ def test_devops_build_commons_path():
assert image is not None
assert image.is_valid()
assert "docker/" == image.build_commons_path()
def test_should_calculate_image_name():
    # default naming falls back to NAME_ONLY
    sut = build_devops({})
    image = sut.specialized_builds[BuildType.IMAGE]
    assert "name" == image.image_name()
    sut = build_devops({'image_naming': "NAME_ONLY"})
    image = sut.specialized_builds[BuildType.IMAGE]
    assert "name" == image.image_name()
    # NAME_AND_MODULE appends the module with a dash
    sut = build_devops({'image_naming': "NAME_AND_MODULE"})
    image = sut.specialized_builds[BuildType.IMAGE]
    assert "name-module" == image.image_name()

@ -41,6 +41,8 @@ def test_should_calculate_backend_config():
{
"module": "dns_aws",
"stage": "prod",
"aws_access_key": "aws_access_key",
"aws_secret_key": "aws_secret_key",
"aws_bucket": "meissa-configuration",
"aws_bucket_kms_key_id": "arn:aws:kms:eu-central-1:907507348333:alias/meissa-configuration",
"aws_region": "eu-central-1",
@ -48,6 +50,8 @@ def test_should_calculate_backend_config():
)
)
assert {
"access_key": "aws_access_key",
"secret_key": "aws_secret_key",
"bucket": "meissa-configuration",
"key": "prod/dns_aws",
"kms_key_id": "arn:aws:kms:eu-central-1:907507348333:alias/meissa-configuration",

@ -1,3 +1,4 @@
import pytest
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
@ -14,7 +15,7 @@ from src.main.python.ddadevops.domain import (
from .helper import build_devops, devops_config
def test_sould_validate_release():
def test_should_validate_release():
sut = Release(
devops_config(
{
@ -48,7 +49,7 @@ def test_sould_validate_release():
assert not sut.is_valid()
def test_sould_calculate_build_files():
def test_should_calculate_build_files():
sut = Release(
devops_config(
{
@ -61,3 +62,74 @@ def test_sould_calculate_build_files():
Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
)
assert ["project.clj", "package.json"] == sut.build_files()
def test_should_calculate_forgejo_release_api_endpoint():
    # plain configuration resolves to <server>/api/v1/repos/<org>/<repo>/releases
    sut = Release(
        devops_config(
            {
                "release_artifacts": [],
                "release_artifact_token": "y",
                "release_artifact_server_url": "https://repo.prod.meissa.de",
                "release_organisation": "meissa",
                "release_repository_name": "provs",
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
    )
    assert (
        "https://repo.prod.meissa.de/api/v1/repos/meissa/provs/releases"
        == sut.forgejo_release_api_endpoint()
    )
    # stray slashes around configured values are normalized in the endpoint only;
    # the stored configuration itself stays untouched
    sut = Release(
        devops_config(
            {
                "release_artifacts": ["x"],
                "release_artifact_token": "y",
                "release_artifact_server_url": "https://repo.prod.meissa.de/",
                "release_organisation": "/meissa/",
                "release_repository_name": "provs",
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
    )
    assert (
        "https://repo.prod.meissa.de/api/v1/repos/meissa/provs/releases"
        == sut.forgejo_release_api_endpoint()
    )
    assert(
        "/meissa/"
        == sut.release_organisation
    )
    # a missing organisation must make endpoint construction fail
    with pytest.raises(Exception):
        sut = Release(
            devops_config(
                {
                    "release_artifact_server_url": "https://repo.prod.meissa.de",
                    "release_organisation": None,
                    "release_repository_name": "provs",
                }
            ),
            Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
        )
        sut.forgejo_release_api_endpoint()
def test_should_calculate_forgejo_release_asset_api_endpoint():
    sut = Release(
        devops_config(
            {
                "release_artifacts": ["x"],
                "release_artifact_token": "y",
                "release_artifact_server_url": "https://repo.prod.meissa.de",
                "release_organisation": "meissa",
                "release_repository_name": "provs",
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
    )
    # asset uploads append /<release_id>/assets to the release endpoint
    assert (
        "https://repo.prod.meissa.de/api/v1/repos/meissa/provs/releases/123/assets"
        == sut.forgejo_release_asset_api_endpoint(123)
    )

@ -5,7 +5,9 @@ from pybuilder.core import Project
from src.main.python.ddadevops.release_mixin import ReleaseMixin
from src.main.python.ddadevops.domain import Devops, Release
from .domain.helper import devops_config
from src.main.python.ddadevops.application import ReleaseService
from src.main.python.ddadevops.infrastructure import BuildFileRepository
from .domain.helper import devops_config, GitApiMock, ArtifactDeploymentApiMock
from .resource_helper import copy_resource
@ -14,6 +16,8 @@ def test_release_mixin(tmp_path):
copy_resource(Path("package.json"), tmp_path)
project = Project(str_tmp_path, name="name")
os.environ["RELEASE_ARTIFACT_TOKEN"] = "ratoken"
sut = ReleaseMixin(
project,
devops_config(
@ -28,3 +32,37 @@ def test_release_mixin(tmp_path):
sut.initialize_build_dir()
assert sut.build_path() == f"{str_tmp_path}/target/name/release-test"
def test_release_mixin_different_version_suffixes(tmp_path):
    # a PATCH release over mixed build files must keep each file's own snapshot suffix
    str_tmp_path = str(tmp_path)
    copy_resource(Path("config.py"), tmp_path)
    copy_resource(Path("config.gradle"), tmp_path)
    project = Project(str_tmp_path, name="name")
    os.environ["RELEASE_ARTIFACT_TOKEN"] = "ratoken"
    sut = ReleaseMixin(
        project,
        devops_config(
            {
                "project_root_path": str_tmp_path,
                "mixin_types": ["RELEASE"],
                "build_types": [],
                "module": "release-test",
                "release_current_branch": "main",
                "release_main_branch": "main",
                "release_primary_build_file": "config.py",
                "release_secondary_build_files": ["config.gradle"],
            }
        ),
    )
    # swap in mocks so no real git remote or artifact server is touched
    sut.release_service = ReleaseService(GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepository(project.basedir))
    sut.initialize_build_dir()
    sut.update_release_type("PATCH")
    sut.prepare_release()
    sut.tag_bump_and_push_release()
    # python files bump to a "-dev" suffix, gradle files to "-SNAPSHOT"
    assert sut.release_service.build_file_repository.get(Path("config.py")).get_version().to_string() == "3.1.5-dev"
    assert sut.release_service.build_file_repository.get(Path("config.gradle")).get_version().to_string() == "3.1.5-SNAPSHOT"
Loading…
Cancel
Save