Compare commits

...

145 commits
4.7.0 ... main

Author SHA1 Message Date
3bc72f5bb8 bump version to: 4.13.2-dev 2024-08-26 18:59:29 +02:00
e9fdfdf520 release: 4.13.1 2024-08-26 18:59:29 +02:00
7fa2a8056d fix provision over dns 2024-08-26 18:59:11 +02:00
dfb46d76a5 bump version to: 4.13.1-dev 2024-08-22 08:25:43 +02:00
39c6b95af8 release: 4.13.0 2024-08-22 08:25:43 +02:00
0935eae193 pylint no longer works in our setup 2024-08-22 08:25:33 +02:00
58a1b005e9 add deps.edn 2024-08-22 08:16:41 +02:00
bba684d76e add deps.edn 2024-08-22 08:15:24 +02:00
ec150dde62 remove backup image 2024-08-21 17:46:12 +02:00
76d4ad16dc doc 2024-08-21 16:57:16 +02:00
126ae37845 backup integration test with bb - init starts to work 2024-08-16 14:41:39 +02:00
e9f1915655 Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2024-08-06 14:41:45 +02:00
7448333d7c [Skip-CI] Fix mastodon add website link 2024-08-06 14:41:38 +02:00
bom
e8555798e4 bump version to: 4.12.2-dev 2024-06-28 12:01:48 +02:00
bom
cf4d1e450c release: 4.12.1 2024-06-28 12:01:48 +02:00
bom
a661eaf3ca Add optional release tag prefix
Used for go modules
2024-06-28 12:00:54 +02:00
f4da27f63f Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2024-05-22 15:45:36 +02:00
e764534487 sequence chart restic-management.clj 2024-05-22 15:42:55 +02:00
bom
6093d160e8 bump version to: 4.12.1-dev 2024-05-10 15:25:58 +02:00
bom
4d8dc95d8e release: 4.12.0 2024-05-10 15:25:58 +02:00
bom
e6f39eab21 Add support for hetzner csi 2024-05-10 15:24:29 +02:00
0cb4bc43f9 store state passwords in map & edn 2024-04-17 11:37:13 +02:00
5b5bc0ab96 install restic_management.clj to image 2024-04-17 11:36:24 +02:00
b139865f89 bump version to: 4.11.11-dev 2024-04-17 09:26:00 +02:00
e4fa06fc42 release: 4.11.10 2024-04-17 09:26:00 +02:00
4fa849b72b check with version 2024-04-17 09:20:57 +02:00
48bbbe6f6e refactoring clj-cljs image install.sh 2024-04-12 16:02:27 +02:00
bf843edb80 refactoring image install.sh checksum function graalvm kubeconform 2024-04-12 14:54:17 +02:00
5e8c21c521 dev-notes first steps 2024-04-12 11:25:25 +02:00
3bc3a0cd7e Statemachine credRot devnotes 2024-04-05 11:55:57 +02:00
56bc215f26 read / write state 2024-04-05 10:42:54 +02:00
678b75ae6f ignore 2024-04-05 10:05:12 +02:00
d133b281f9 refactoring installing babashka 2024-03-26 16:38:12 +01:00
581449fba4 Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2024-03-26 14:05:33 +01:00
b38876d9ef babashka binary install to image 2024-03-26 14:04:06 +01:00
bom
90c4d4ec9d bump version to: 4.11.10-dev 2024-03-15 15:42:16 +01:00
bom
c0daa85612 release: 4.11.9 2024-03-15 15:42:16 +01:00
bom
0bdd13cf8a Add restic to base backup image 2024-03-15 15:34:56 +01:00
bom
1bba35963a bump version to: 4.11.9-dev 2024-03-15 13:54:27 +01:00
bom
2b7fe54f76 release: 4.11.8 2024-03-15 13:54:27 +01:00
e96581754c move backup image to devops-build 2024-03-06 10:31:40 +01:00
bom
4034b0022b bump version to: 4.11.8-dev 2024-02-24 12:12:14 +01:00
bom
87cc56cdd7 release: 4.11.7 2024-02-24 12:12:14 +01:00
bom
a9d98a6d0c Use returncode to check if process failed 2024-02-24 12:11:57 +01:00
bom
671a3b8cbb bump version to: 4.11.7-dev 2024-02-24 12:07:53 +01:00
bom
011fc848af release: 4.11.6 2024-02-24 12:07:53 +01:00
bom
c5af5c9198 Raise error if process fails in execute_live 2024-02-24 12:07:31 +01:00
bom
afec1fdd0c bump version to: 4.11.6-dev 2024-02-23 15:22:22 +01:00
bom
6513e00e54 release: 4.11.5 2024-02-23 15:22:22 +01:00
bom
3cc6206d06 Extend execute_live to handle input
use the new functionality for docker drun
2024-02-23 15:21:56 +01:00
48452baac5 bump version to: 4.11.5-dev 2024-02-17 11:53:52 +01:00
5875642777 release: 4.11.4 2024-02-17 11:53:52 +01:00
58d8d46a0c initialize jvm build tools 2024-02-17 11:45:36 +01:00
6e57204ce5 bump version to: 4.11.4-dev 2024-02-17 08:50:09 +01:00
d3f1204932 release: 4.11.3 2024-02-17 08:50:09 +01:00
305c9a6bd0 there is no jdk21 2024-02-17 08:49:38 +01:00
751bc26a21 bump version to: 4.11.3-dev 2024-02-16 18:41:28 +01:00
202c07150c release: 4.11.2 2024-02-16 18:41:28 +01:00
f5c75a31f5 use 4.10.7 2024-02-16 18:40:58 +01:00
7c24477348 java 21 2024-02-16 18:40:33 +01:00
237691f79f bump version to: 4.11.2-dev 2024-02-16 18:38:08 +01:00
c6c8de9b0f release: 4.11.1 2024-02-16 18:38:07 +01:00
a7a00756bc fix the bootstrap 2024-02-16 18:20:23 +01:00
e822e3d0f0 bump version to: 4.11.1-dev 2024-02-16 18:13:32 +01:00
bbd12cda4e release: 4.11.0 2024-02-16 18:13:32 +01:00
adcf93d321 fix java home 2024-02-16 18:12:30 +01:00
c9df6082ae Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2024-02-16 18:05:32 +01:00
5825bcbf47 update graal version 2024-02-16 18:05:27 +01:00
ansgarz
b8f7b72a61 chg README.md 2024-02-02 10:08:21 +01:00
ansgarz
f4d30706b7 fix typo 2024-02-02 10:06:05 +01:00
ansgarz
72cd6a65d2 add link to example project to README.md 2024-01-26 10:37:07 +01:00
ansgarz
fde5061bf3 update docs 2024-01-12 11:34:38 +01:00
0dcb375e15 bump version to: 4.10.9-dev 2024-01-05 16:52:36 +01:00
c6872458e2 release: 4.10.8 2024-01-05 16:52:36 +01:00
a5a653b213 [skip-ci] infrastructure/../test folder removed 2023-12-22 16:54:55 +01:00
f12d43b9bc bump version to: 4.10.8-dev 2023-12-22 15:58:59 +01:00
5f18c7ddb3 release: 4.10.7 2023-12-22 15:58:58 +01:00
c92923f6b9 [skip-ci] fix 2023-12-22 15:58:20 +01:00
2aec5b44d2 fix:: ignore missing image/test folder 2023-12-22 15:46:11 +01:00
52622b1f2d bump version to: 4.10.7-dev 2023-12-22 15:17:49 +01:00
822bebc26f release: 4.10.6 2023-12-22 15:17:49 +01:00
0ba623c560 ignore missing image/test folder 2023-12-22 15:17:36 +01:00
147cb1bd72 bump version to: 4.10.6-dev 2023-12-21 14:22:41 +01:00
56dc19322a release: 4.10.5 2023-12-21 14:22:41 +01:00
ee20925e04 gitlabci.yml use dind/python 4.10.5 2023-12-21 14:22:29 +01:00
2455ea0ff0 bump version to: 4.10.5-dev 2023-12-21 14:10:14 +01:00
739940cdd3 release: 4.10.4 2023-12-21 14:10:14 +01:00
47dc81860a bump version to: 4.10.4-dev 2023-12-15 17:28:45 +01:00
308c86595c release: 4.10.3 2023-12-15 17:28:45 +01:00
45745f3ebd fix gitlab-ci.yml version 2023-12-15 17:26:50 +01:00
5b5249eb55 usage of install_functions_debian/alpine.sh 2023-12-15 17:21:43 +01:00
d2a3e60edf bump version to: 4.10.3-dev 2023-12-15 16:22:54 +01:00
8324727b8a release: 4.10.2 2023-12-15 16:22:53 +01:00
c79c0c1a7f bump version to: 4.10.2-dev 2023-12-15 16:13:59 +01:00
d5c9bae212 release: 4.10.1 2023-12-15 16:13:59 +01:00
2914be8a88 better structuring to docker image building 2023-12-15 16:11:49 +01:00
bfc55293dc Adapt usage ddadevops-dind/python version 2023-12-07 21:04:10 +01:00
9a0573bc33 bump version to: 4.10.1-dev 2023-12-07 20:41:59 +01:00
701047fd4f release: 4.10.0 2023-12-07 20:41:59 +01:00
9638450ef6 Improvements docker image building 2023-12-07 20:38:43 +01:00
d90f68ce1a Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2023-12-01 11:19:18 +01:00
f3f51b165a use execution_api for self checking docker image 2023-12-01 11:18:02 +01:00
bom
29d439e82a bump version to: 4.9.4-dev 2023-12-01 11:10:27 +01:00
bom
4d8961557c release: 4.9.3 2023-12-01 11:10:27 +01:00
bom
3e49e31e66 Add kotlin image to ci 2023-12-01 11:09:39 +01:00
bom
5053b79ad4 Make version regex for python and gradle more concrete
Check that "version" is the start of the string to avoid changing cases like
"kotlin_version = ..."
2023-11-24 22:28:39 +01:00
bom
215a9bf0fe Add regression test for malformed version in clj 2023-11-17 15:04:36 +01:00
bom
97978e9950 bump version to: 4.9.3-dev 2023-11-17 15:01:00 +01:00
bom
7375ba16cc release: 4.9.2 2023-11-17 15:01:00 +01:00
bom
07cf837ac6 Simplify set/get version functions 2023-11-17 15:00:33 +01:00
bom
6399a1dfeb Update regexes for gradle and python files 2023-11-17 14:58:53 +01:00
bom
36df9f486b Cleanup
Fix typos, remove old todos
2023-11-17 14:45:38 +01:00
bom
617d909438 Replace clojure version regex 2023-11-17 14:42:47 +01:00
bom
bcccfd8b9c Move regex string to a function
Avoids having duplicates in similar functions
2023-11-17 13:57:11 +01:00
bom
f7897a574d bump version to: 4.9.2-dev 2023-11-17 13:04:40 +01:00
bom
c6217bd0a2 release: 4.9.1 2023-11-17 13:04:40 +01:00
bom
e5d1203435 Update version suffix to fit build file type
Resolves a bug where when using "build.py" as the primary build file
and some other secondary file like "build.gradle",
would result in the ".gradle" file version having a "-dev" suffix
 instead of "-SNAPSHOT"

Includes regression test
2023-11-17 13:03:09 +01:00
bom
78824ea38b Extend GitApiMock with push_follow_tags 2023-11-17 12:50:20 +01:00
288247be8e bump version to: 4.9.1-dev 2023-10-25 09:26:56 +02:00
bc06c34ea3 release: 4.9.0 2023-10-25 09:26:56 +02:00
03c4229cf0 Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/dda-devops-build 2023-10-25 09:25:22 +02:00
32ae4f2a6f bump version to: 4.8.1-dev 2023-10-25 09:23:51 +02:00
3bbae609d7 release: 4.8.0 2023-10-25 09:23:51 +02:00
jem
b8f6129146 Merge pull request 'kotlin-compile' (#1) from kotlin-compile into main
Reviewed-on: #1
2023-10-25 07:20:12 +00:00
ef2efc40f7 updates for kotlin build 2023-10-25 09:18:41 +02:00
b3a612c938 Merge branch 'main' into kotlin-compile 2023-10-25 09:15:40 +02:00
Clemens
edd2ae5743 bump version to: 4.7.5-dev 2023-10-13 09:51:23 +02:00
Clemens
fc92260a43 release: 4.7.4 2023-10-13 09:51:23 +02:00
7e5e66d933 fix doc 2023-09-29 09:52:37 +02:00
9fdd81d4b0 bump version to: 4.7.4-dev 2023-09-22 18:08:43 +02:00
5d81903870 release: 4.7.3 2023-09-22 18:08:42 +02:00
d643bba325 add some cleanup 2023-09-22 17:49:37 +02:00
4c0524aafe bump version to: 4.7.3-dev 2023-09-22 17:46:58 +02:00
1db263d13f release: 4.7.2 2023-09-22 17:46:58 +02:00
58bfd98af9 bump version to: 4.7.2-dev 2023-09-22 17:38:34 +02:00
d3e8c19f02 release: 4.7.1 2023-09-22 17:38:34 +02:00
bdca4f224f add new function to doc 2023-08-17 18:28:17 +02:00
331c57a952 gitlab ci is no longer used 2023-08-17 18:25:59 +02:00
cea54b0945 added doc for creating artifacts 2023-08-17 18:23:33 +02:00
d8396402b5 bootstrap with dev no longer needed 2023-08-17 17:28:01 +02:00
1afa34dba3 required for releasing 2023-08-17 17:25:55 +02:00
a5923aef5f use released build-container 2023-08-17 17:25:42 +02:00
c669d8f7b5 bump version to: 4.7.1-dev 2023-08-17 17:15:18 +02:00
00202edecc add kotlin image 2023-08-11 13:35:15 +02:00
31ac285df0 add kotlin image 2023-08-08 09:04:28 +02:00
65 changed files with 1206 additions and 289 deletions

View file

@ -1,42 +0,0 @@
name: stable
on:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+'
jobs:
build:
name: stable build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use python 3.x
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: build stable release
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
run: |
pyb -P version=${{ github.ref }} publish upload
- name: Create GH Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ github.ref }}
draft: false
prerelease: false

View file

@ -1,30 +0,0 @@
name: unstable
on:
push:
tags:
- '![0-9]+.[0-9]+.[0-9]+'
jobs:
build:
name: unstable
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use python 3.x
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: build unstable release
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
run: |
pyb publish upload

3
.gitignore vendored
View file

@ -109,3 +109,6 @@ venv.bak/
.clj-kondo/
.lsp/
.calva/
.cpcache/
infrastructure/backup/image/resources/backup-repository-state.edn

View file

@ -4,14 +4,14 @@ stages:
- image
.py: &py
image: "domaindrivenarchitecture/ddadevops-python:4.5.5-dev2023-08-17-17-07-54"
image: "domaindrivenarchitecture/ddadevops-python:4.10.7"
before_script:
- export RELEASE_ARTIFACT_TOKEN=$MEISSA_REPO_BUERO_RW
- python --version
- pip install -r requirements.txt
.img: &img
image: "domaindrivenarchitecture/ddadevops-dind:4.1.0"
image: "domaindrivenarchitecture/ddadevops-dind:4.10.7"
services:
- docker:dind
before_script:
@ -80,3 +80,10 @@ ddadevops-image-publish:
stage: image
script:
- cd infrastructure/ddadevops && pyb image publish
kotlin-image-publish:
<<: *img
<<: *tag_only
stage: image
script:
- cd infrastructure/kotlin && pyb image publish

View file

@ -1,8 +1,7 @@
# dda-devops-build
[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://meissa-gmbh.de/img/community/Mastodon_Logotype.svg" width=20 alt="team@social.meissa-gmbh.de"> team@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@team) | [Website & Blog](https://domaindrivenarchitecture.org)
[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://domaindrivenarchitecture.org/img/delta-chat.svg" width=20 alt="DeltaChat"> chat over e-mail](mailto:buero@meissa-gmbh.de?subject=community-chat) | [<img src="https://meissa.de/images/parts/contact/mastodon36_hue9b2464f10b18e134322af482b9c915e_5501_filter_14705073121015236177.png" width=20 alt="M"> meissa@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@meissa) | [Blog](https://domaindrivenarchitecture.org) | [Website](https://meissa.de)
![release prod](https://github.com/DomainDrivenArchitecture/dda-devops-build/workflows/release%20prod/badge.svg)
dda-devops-build integrates all the tools we use to work with clouds & provides some nice functions around them.
@ -84,6 +83,10 @@ Principles we follow are:
* Separate build artefacts from version controlled code
* Domain Driven Design - in order to stay sustainable
## Example Project
An example project using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
## Installation
Ensure that your python3 version is at least Python 3.10
@ -94,17 +97,9 @@ pip3 install -r requirements.txt
export PATH=$PATH:~/.local/bin
```
## Reference
## Example Project
* [DevopsBuild](./doc/DevopsBuild.md)
* [DevopsImageBuild](./doc/DevopsImageBuild.md)
* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
* [ReleaseMixin](./doc/ReleaseMixin.md)
* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
* [C4kBuild](doc/C4kBuild.md)
An example project using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
## Example Build
@ -190,6 +185,19 @@ pyb [patch|minor|major]
pip3 install --upgrade ddadevops
```
## Reference
* [DevopsBuild](./doc/DevopsBuild.md)
* [DevopsImageBuild](./doc/DevopsImageBuild.md)
* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
* [ReleaseMixin](./doc/ReleaseMixin.md)
* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
* [C4kBuild](doc/C4kBuild.md)
## Development & mirrors
Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
@ -205,8 +213,3 @@ For more details about our repository model see: https://repo.prod.meissa.de/mei
Copyright © 2021 meissa GmbH
Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")
## License
Copyright © 2023 meissa GmbH
Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")

View file

@ -33,7 +33,7 @@ default_task = "dev"
name = "ddadevops"
MODULE = "not-used"
PROJECT_ROOT_PATH = "."
version = "4.7.0"
version = "4.13.2-dev"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
description = __doc__
authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]
@ -46,7 +46,7 @@ license = "Apache Software License"
def initialize(project):
# project.build_depends_on('mockito')
# project.build_depends_on('unittest-xml-reporting')
project.build_depends_on("ddadevops>=4.0.0")
project.build_depends_on("ddadevops>=4.7.0")
project.set_property("verbose", True)
project.get_property("filter_resources_glob").append(
@ -102,6 +102,7 @@ def initialize(project):
"infrastructure/ddadevops/build.py",
"infrastructure/clj-cljs/build.py",
"infrastructure/clj/build.py",
"infrastructure/kotlin/build.py",
],
"release_artifacts": [],
"release_artifact_server_url": "https://repo.prod.meissa.de",
@ -139,13 +140,7 @@ def lint(project):
shell=True,
check=True,
)
run(
"pylint -d W0511,R0903,C0301,W0614,C0114,C0115,C0116,similarities,W1203,W0702,W0702,"
+ "R0913,R0902,R0914,R1732,R1705,W0707,C0123,W0703,C0103 src/main/python/ddadevops/",
shell=True,
check=True,
)
@task
def patch(project):

View file

@ -35,7 +35,12 @@ classDiagram
| name | name in context of build & ENV | - | |
## Example Usage
### build.py
### Example project
A complete example project can be found at: https://repo.prod.meissa.de/meissa/buildtest
### Example of a build.py
```python
from os import environ

View file

@ -23,12 +23,9 @@ classDiagram
| build_dir_name | name of dir, build is executed in | target |
| build_types | list of special builds used. Valid values are ["IMAGE", "C4K", "K3S", "TERRAFORM"] | [] |
| mixin_types | mixins are orthogonal to builds and represent additional capabilities. Valid values are ["RELEASE"] | [] |
| module | module name - may result in a hierarchy like name/module | |
| name | dedicated name of the build | module |
| project_root_path | relative path to projects root. Is used to locate the target dir | |
| stage | sth. like test, int, acc or prod | |
## Example Usage
### build.py
```python

View file

@ -30,7 +30,7 @@ classDiagram
| image_dockerhub_user | user to access docker-hub | IMAGE_DOCKERHUB_USER from env or credentials from gopass |
| image_dockerhub_password | password to access docker-hub | IMAGE_DOCKERHUB_PASSWORD from env or credentials from gopass |
| image_tag | tag for publishing the image | IMAGE_TAG from env |
| image_naming | Strategy for calculating the image name. Possible values are [NAME_ONLY, NAME_AND_MODULE] | NAME_ONLY |
### Credentials Mapping defaults

View file

@ -13,14 +13,17 @@ classDiagram
## Input
| name | description | default |
| ----------------------------- | ----------------------------------------------------------------- | --------- |
| k3s_provision_user | the user used to provision k3s | "root" |
| k3s_letsencrypt_email | email address used for letsencrypt | |
| k3s_letsencrypt_endpoint | letsencrypt endpoint. Valid values are staging, prod | "staging" |
| k3s_app_filename_to_provision | an k8s manifest to apply imediately after k3s setup was sucessful | |
| k3s_enable_echo | provision the echo app on k3s. Valid values are true, false | "false" |
| k3s_provs_template | use a individual template for provs config | None |
| name | description | default |
| --------------------------------- | ----------------------------------------------------------------- | --------- |
| k3s_provision_user | the user used to provision k3s | "root" |
| k3s_letsencrypt_email | email address used for letsencrypt | |
| k3s_letsencrypt_endpoint | letsencrypt endpoint. Valid values are staging, prod | "staging" |
| k3s_app_filename_to_provision     | a k8s manifest to apply immediately after k3s setup was successful | |
| k3s_enable_echo                   | provision the echo app on k3s. Valid values are true, false        | "false" |
| k3s_provs_template                | use an individual template for provs config                        | None |
| k3s_enable_hetzner_csi            | enable hetzner csi                                                  | False |
| k3s_hetzner_api_token             | the hetzner api token                                               | None |
| k3s_hetzner_encryption_passphrase | encryption passphrase for volumes | None |
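A minimal, hypothetical build.py fragment wiring the new hetzner csi inputs. Only the three csi keys are taken from the table above; all other names and values are placeholders following the pattern of the other build.py examples in this compare, and the `ProvsK3sBuild` class name is assumed from doc/ProvsK3sBuild.md:

```python
from os import environ
from pybuilder.core import init
from ddadevops import *

@init
def initialize(project):
    # ">=4.12.0" is an assumption based on the commit list above
    # ("Add support for hetzner csi" landed with release 4.12.0).
    project.build_depends_on("ddadevops>=4.12.0")
    input = {
        "name": "my-cluster",                # placeholder
        "module": "k3s",                     # placeholder
        "stage": "test",                     # placeholder
        "project_root_path": "..",
        "build_types": ["K3S"],
        "mixin_types": [],
        "k3s_provision_user": "root",
        "k3s_letsencrypt_email": "admin@example.org",   # placeholder
        "k3s_letsencrypt_endpoint": "staging",
        "k3s_app_filename_to_provision": "app.yaml",    # placeholder
        # new csi inputs from the table above:
        "k3s_enable_hetzner_csi": True,
        "k3s_hetzner_api_token": environ.get("HETZNER_API_TOKEN"),
        "k3s_hetzner_encryption_passphrase": environ.get("K3S_ENCRYPTION_PASSPHRASE"),
    }
    build = ProvsK3sBuild(project, input)
    build.initialize_build_dir()
```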
### Credentials Mapping defaults

View file

@ -1,5 +1,15 @@
# ReleaseMixin
- [ReleaseMixin](#releasemixin)
- [Input](#input)
- [Example Usage just for creating releases](#example-usage-just-for-creating-releases)
- [build.py](#buildpy)
- [call the build for creating a major release](#call-the-build-for-creating-a-major-release)
- [Example Usage for creating a release on forgejo / gitea \& upload the generated artifacts](#example-usage-for-creating-a-release-on-forgejo--gitea--upload-the-generated-artifacts)
- [build.py](#buildpy-1)
- [call the build](#call-the-build)
Support for releases following the trunk-based-release flow (see https://trunkbaseddevelopment.com/)
```mermaid
@ -8,6 +18,7 @@ classDiagram
prepare_release() - adjust all build files to carry the correct version & commit locally
tag_and_push_release() - tag the git repo and push changes to origin
update_release_type (release_type) - change the release type during run time
publish_artifacts() - publish release & artifacts to forgejo/gitea
}
```
@ -15,13 +26,103 @@ classDiagram
## Input
| name | description | default |
| ----------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------- |
| ----------------------------- |-----------------------------------------------------------------------------------------------------------------------| --------------- |
| release_type | one of MAJOR, MINOR, PATCH, NONE | "NONE" |
| release_main_branch | the name of your trank | "main" |
| release_main_branch | the name of your trunk | "main" |
| release_primary_build_file | path to the build file having the leading version info (read & write). Valid extensions are .clj, .json, .gradle, .py | "./project.clj" |
| release_secondary_build_files | list of secondary build files the version is written to. | [] |
| release_artifact_server_url | Optional: The base url of your forgejo/gitea instance to publish a release to | |
| release_organisation | Optional: The repository organisation name | |
| release_repository_name | Optional: The repository name | |
| release_artifacts | Optional: The list of artifacts to publish to the generated release | [] |
| release_tag_prefix | Optional: Prefix of tag | "" |
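The release_tag_prefix is new in this compare (see the "Add optional release tag prefix" commit above). A minimal illustration of how the tag is derived from it, mirroring the ReleaseService change further down; the concrete values are placeholders:

```python
# Sketch only: mirrors release_tag = f"{release.release_tag_prefix}{release_version.to_string()}"
# from the ReleaseService diff below.
release_tag_prefix = "v"   # e.g. for go modules, which expect v-prefixed tags
version = "4.13.1"
release_tag = f"{release_tag_prefix}{version}"
print(release_tag)         # -> v4.13.1
```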
## Example Usage
## Example Usage just for creating releases
### build.py
```python
from os import environ
from pybuilder.core import task, init
from ddadevops import *
name = 'my-project'
MODULE = 'my-module'
PROJECT_ROOT_PATH = '..'
@init
def initialize(project):
project.build_depends_on("ddadevops>=4.7.0")
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": [],
"mixin_types": ["RELEASE"],
"release_type": "MINOR",
"release_primary_build_file": "project.clj",
"release_secondary_build_files": ["package.json"],
}
build = ReleaseMixin(project, input)
build.initialize_build_dir()
@task
def patch(project):
linttest(project, "PATCH")
release(project)
@task
def minor(project):
linttest(project, "MINOR")
release(project)
@task
def major(project):
linttest(project, "MAJOR")
release(project)
@task
def dev(project):
linttest(project, "NONE")
@task
def prepare(project):
build = get_devops_build(project)
build.prepare_release()
@task
def tag(project):
build = get_devops_build(project)
build.tag_bump_and_push_release()
def release(project):
prepare(project)
tag(project)
def linttest(project, release_type):
build = get_devops_build(project)
build.update_release_type(release_type)
#test(project)
#lint(project)
```
### call the build for creating a major release
```bash
pyb major
```
## Example Usage for creating a release on forgejo / gitea & upload the generated artifacts
### build.py
@ -36,7 +137,7 @@ PROJECT_ROOT_PATH = '..'
@init
def initialize(project):
project.build_depends_on("ddadevops>=4.0.0")
project.build_depends_on("ddadevops>=4.7.0")
input = {
"name": name,
@ -48,35 +149,23 @@ def initialize(project):
"release_type": "MINOR",
"release_primary_build_file": "project.clj",
"release_secondary_build_files": ["package.json"],
"release_artifact_server_url": "https://repo.prod.meissa.de",
"release_organisation": "meissa",
"release_repository_name": "dda-devops-build",
"release_artifacts": ["target/doc.zip"],
}
roject.build_depends_on("ddadevops>=4.0.0-dev")
build = ReleaseMixin(project, input)
build.initialize_build_dir()
@task
def prepare_release(project):
def publish_artifacts(project):
build = get_devops_build(project)
build.prepare_release()
@task
def build(project):
print("do the build")
@task
def publish(project):
print("publish your artefacts")
@task
def after_publish(project):
build = get_devops_build(project)
build.tag_bump_and_push_release()
build.publish_artifacts()
```
### call the build
```bash
pyb prepare_release build publish after_publish
git checkout "4.7.0"
pyb publish_artifacts
```

View file

@ -0,0 +1,56 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
import logging
name = 'dda-backup'
MODULE = 'NOT_SET'
PROJECT_ROOT_PATH = '../..'
version = "4.12.2-dev"
@init
def initialize(project):
image_tag = version
if "dev" in image_tag:
image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
"image_naming": "NAME_ONLY",
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.7.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()
@task
def image(project):
build = get_devops_build(project)
build.image()
@task
def test(project):
build = get_devops_build(project)
build.test()
@task
def drun(project):
build = get_devops_build(project)
build.drun()
@task
def publish(project):
build = get_devops_build(project)
build.dockerhub_login()
build.dockerhub_publish()

View file

@ -0,0 +1,79 @@
## Init Statemachine
### Inputs
1. `restic-password: ""`
2. `restic-password-to-rotate: ""`
### Manual init of the restic repository for the first time
1. apply backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=1`
2. exec into the pod and execute the init script (press tab to get your exact pod name)
`kubectl exec -it backup-restore-... -- /usr/local/bin/init.sh`
3. remove backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=0`
### Password Rotation
1. apply backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=1`
2. add new password to restic repository
`restic key add ....`
=> Trigger ::
field (1) credential current
field (2) credential new
3. replace field (1) with (2) & clear (2)
4. remove old key - ???
`restic remove ....`
```mermaid
stateDiagram-v2
[*] --> init
init --> backup_ready: trigger, restic-password !empty
backup_ready --> new_password_added: restic-password !empty && restic-password-to-rotate !empty
new_password_added --> backup_ready: restic-password !empty && restic-password-to-rotate empty
```
### First Steps
1. Start the cloud test server
2. There, create the backup-restore deployment (empty secret possible?), plus a new secret "rotation-credential-secret" as data
3. Mount the created secret into the backup-restore pod
4. Start the babashka script in the pod -> reads the secret, empty?
5. Consult Micha.
```mermaid
sequenceDiagram
participant k8s
participant e as entrypoint.sh
participant rm as restic-management.clj
k8s ->> e: cronjob calls
e ->> rm: start-file
rm ->> rm: rotate
activate rm
rm ->> rm: read-backup-repository-state (state)
rm ->> rm: read-secret (backup-secret/restic-password, rotation-credential-secret/rotation-credential)
rm ->> rm: switch
activate rm
rm ->> rm: if init && restic-password != null
activate rm
rm ->> rm: init.sh
rm ->> rm: state init -> backup-ready
deactivate rm
rm ->> rm: if backup-ready && rotation-credential != null
activate rm
rm ->> rm: add-new-password-to-restic-repository.sh
rm ->> rm: state backup-ready -> new-password-added
deactivate rm
rm ->> rm: if new-password-added && rotation-credential == null
activate rm
rm ->> rm: remove-old-password-from-restic-repository.sh
rm ->> rm: state new-password-added -> backup-ready
deactivate rm
deactivate rm
rm ->> rm: store-repository-state (state)
deactivate rm
```
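A minimal Python sketch of the same transition logic, for readers who prefer code over diagrams. This is an illustration only; the actual implementation is the babashka script restic-management.clj shown further down in this compare, and the three hooks here merely stand in for the shell scripts named in the diagram:

```python
# Stand-ins for init.sh, add-new-password-to-restic-repository.sh and
# remove-old-password-from-restic-repository.sh (placeholders).
def init_repo():
    print("restic init")

def add_key():
    print("restic key add")

def remove_key():
    print("restic key remove")

def rotate(state, restic_password, rotation_credential):
    """Return the next state, mirroring the state and sequence diagrams above."""
    if state == "init" and restic_password:
        init_repo()
        return "backup-ready"
    if state == "backup-ready" and rotation_credential:
        add_key()
        return "new-password-added"
    if state == "new-password-added" and not rotation_credential:
        remove_key()
        return "backup-ready"
    return state
```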

View file

@ -0,0 +1,5 @@
FROM ubuntu:jammy
# install it
ADD resources /tmp/
RUN /tmp/install.sh

View file

@ -0,0 +1,70 @@
backup_file_path='files'
function init-file-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init
else
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init --cacert ${CERTIFICATE_FILE}
fi
}
# First arg is the directory, second is optional for the path to a certificate file
function backup-directory() {
local directory="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup .
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup . --cacert ${CERTIFICATE_FILE}
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
fi
}
# First arg is the directory, the remaining args are the sub-directories (relative to the first directory) to backup.
function backup-fs-from-directory() {
local directory="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@ --cacert ${CERTIFICATE_FILE}
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
fi
}
# This does not work like this!
function restore-directory() {
local directory="$1"; shift
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
rm -rf ${directory}*
restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory}
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
rm -rf ${directory}*
restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory} --cacert ${CERTIFICATE_FILE}
fi
}
function list-snapshot-files() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}

View file

@ -0,0 +1,21 @@
# usage: file_env VAR [DEFAULT]
# ie: file_env 'XYZ_DB_PASSWORD' 'example'
# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
# "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
function file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}

View file

@ -0,0 +1,36 @@
#!/bin/bash
set -exo pipefail
function babashka_install() {
babashka_version="1.3.189"
curl -SsLo /tmp/babashka-${babashka_version}-linux-amd64.tar.gz https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz
curl -SsLo /tmp/checksum https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz.sha256
echo " /tmp/babashka-$babashka_version-linux-amd64.tar.gz"|tee -a /tmp/checksum
sha256sum -c --status /tmp/checksum
tar -C /tmp -xzf /tmp/babashka-${babashka_version}-linux-amd64.tar.gz
install -m 0700 -o root -g root /tmp/bb /usr/local/bin/
}
function main() {
{
upgradeSystem
apt-get install -qqy ca-certificates curl gnupg postgresql-client-14 restic
curl -Ss --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg
sh -c 'echo "deb [signed-by=/etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg] https://apt.postgresql.org/pub/repos/apt jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
upgradeSystem
babashka_install
} > /dev/null
update-ca-certificates
install -m 0400 /tmp/functions.sh /usr/local/lib/
install -m 0400 /tmp/pg-functions.sh /usr/local/lib/
install -m 0400 /tmp/file-functions.sh /usr/local/lib/
install -m 0740 /tmp/restic_management.clj /usr/local/bin/
cleanupDocker
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

View file

@ -0,0 +1,149 @@
backup_pg_role_path='pg-role'
backup_pg_database_path='pg-database'
function init-command() {
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} -v init $@
}
function init-role-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
init-command
else
init-command --cacert ${CERTIFICATE_FILE}
fi
}
function init-database-command() {
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} -v init $@
}
function init-database-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
init-database-command
else
init-database-command --cacert ${CERTIFICATE_FILE}
fi
}
function drop-create-db() {
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password -c "DROP DATABASE \"${POSTGRES_DB}\";"
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password -c "CREATE DATABASE \"${POSTGRES_DB}\";"
}
function create-pg-pass() {
local pg_host=${POSTGRES_HOST:-localhost}
echo "${pg_host}:${POSTGRES_DB}:${POSTGRES_USER}:${POSTGRES_PASSWORD}" > /root/.pgpass
echo "${POSTGRES_HOST}:template1:${POSTGRES_USER}:${POSTGRES_PASSWORD}" >> /root/.pgpass
chmod 0600 /root/.pgpass
}
function roles-unlock-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} unlock --cleanup-cache $@
}
function roles-forget-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}
function backup-roles() {
local role_prefix="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
roles-unlock-command
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin
roles-forget-command
else
roles-unlock-command --cacert ${CERTIFICATE_FILE}
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin --cacert ${CERTIFICATE_FILE}
roles-forget-command --cacert ${CERTIFICATE_FILE}
fi
}
function db-unlock-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} unlock --cleanup-cache $@
}
function db-forget-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}
function backup-db-dump() {
if [ -z ${CERTIFICATE_FILE} ];
then
db-unlock-command
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin
db-forget-command
else
db-unlock-command --cacert ${CERTIFICATE_FILE}
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin --cacert ${CERTIFICATE_FILE}
db-forget-command --cacert ${CERTIFICATE_FILE}
fi
}
function restore-roles() {
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
roles-unlock-command
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin | \
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
else
roles-unlock-command --cacert ${CERTIFICATE_FILE}
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
fi
}
function restore-db() {
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
db-unlock-command
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin | \
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
else
db-unlock-command --cacert ${CERTIFICATE_FILE}
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
fi
}
function list-snapshot-roles() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}
function list-snapshot-db() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}

View file

@ -0,0 +1,51 @@
#! /usr/bin/env bb
(ns restic-management
(:require
[clojure.spec.alpha :as s]
[clojure.java.io :as io]
[clojure.edn :as edn]))
(s/def ::state string?)
(s/def ::backup-repository-state
(s/keys :req-un [::state]))
(def state {:state ""})
(defn store-backup-repository-state [s]
(spit "backup-repository-state.edn" s))
(defn read-backup-repository-state []
(try
(with-open [r (io/reader "backup-repository-state.edn")]
(edn/read (java.io.PushbackReader. r)))
(catch java.io.IOException e
(printf "Couldn't open '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))
(catch RuntimeException e
(printf "Error parsing edn file '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))))
(defn read-secret [s]
(slurp (str "/var/run/secrets/" s)))
;"/var/run/secrets/rotation-credential-secret/rotation-credential"))
;(println (read-backup-repository-state))
;(println (:state (read-backup-repository-state)))
;(println (s/valid? ::backup-repository-state (read-backup-repository-state)))
(println (read-secret "rotation-credential-secret/rotation-credential"))
(println (read-secret "backup-secrets/restic-password"))
(s/def ::new-password string?)
(s/def ::old-password string?)
(s/def ::password-state
(s/keys :req-un [::new-password ::old-password]))
(defn rotate []
(let [state {:new-password (read-secret "rotation-credential-secret/rotation-credential")
:old-password (read-secret "backup-secrets/restic-password")}]
(store-backup-repository-state (prn-str state))))
(rotate)

View file

@ -0,0 +1,7 @@
FROM dda-backup:latest
# install it
RUN apt update && apt install -qqy openjdk-17-jre-headless
ADD resources /tmp/
RUN rm -rf /root/.m2
RUN /tmp/install-test.bb

View file

@ -0,0 +1,4 @@
{:deps {org.clojure/spec.alpha {:mvn/version "0.4.233"}
orchestra/orchestra {:mvn/version "2021.01.01-1"}
org.domaindrivenarchitecture/dda-backup {:mvn/version "0.1.1-SNAPSHOT"}}}

View file

@ -0,0 +1,32 @@
#!/usr/bin/env bb
(require '[babashka.tasks :as tasks])
(defn curl-and-check!
[filename artifact-url sha256-url]
(let [filepath (str "/tmp/" filename)]
(tasks/shell "curl" "-SsLo" filepath artifact-url)
(tasks/shell "curl" "-SsLo" "/tmp/checksum" sha256-url)
(tasks/shell "bash" "-c" (str "echo \" " filepath "\"|tee -a /tmp/checksum"))
;(tasks/shell "sha256sum" "-c" "--status" "/tmp/checksum")
))
(defn tar-install!
[filename binname]
(let [filepath (str "/tmp/" filename)]
(tasks/shell "tar" "-C" "/tmp" "-xzf" filepath)
(tasks/shell "install" "-m" "0700" "-o" "root" "-g" "root" (str "/tmp/" binname) "/usr/local/bin/")))
(defn install!
[filename]
(tasks/shell "install" "-m" "0700" "-o" "root" "-g" "root" (str "/tmp/" filename) "/usr/local/bin/"))
(tasks/shell "bb" "/tmp/test.bb")
(curl-and-check!
"provs-syspec.jar"
"https://repo.prod.meissa.de/attachments/0a1da41e-aa5b-4a3e-a3b1-215cf2d5b021"
"https://repo.prod.meissa.de/attachments/f227cf65-cb0f-46a7-a6cd-28f46917412a")
(install! "provs-syspec.jar")
(tasks/shell "apt" "update")
(tasks/shell "apt" "install" "-qqy" "openjdk-17-jre-headless")
(tasks/shell "java" "-jar" "/usr/local/bin/provs-syspec.jar" "local" "-c" "/tmp/spec.yml" )

View file

@ -0,0 +1,7 @@
package:
- name: "restic"
command:
- command: "bb -h"
- command: "/tmp/test.bb"

View file

@ -0,0 +1,27 @@
#!/usr/bin/env bb
(require '[babashka.tasks :as tasks]
'[dda.backup.management :as mgm])
(defn restic-repo-init!
[]
(spit "restic-pwd" "ThePassword")
(mgm/init! {:password-file "restic-pwd"
:restic-repository "restic-repo"}))
(defn restic-backup!
[]
(tasks/shell "mkdir" "test-backup")
(spit "test-backup/file" "I was here")
(tasks/shell "restic" "backup" "--password-file" "restic-pwd" "--repo" "restic-repo" "test-backup"))
(defn restic-restore!
[]
(tasks/shell "mkdir" "test-restore")
(tasks/shell "restic" "restore" "--password-file" "restic-pwd" "--repo" "restic-repo" "--target" "test-restore" "latest")
)
(restic-repo-init!)
(restic-backup!)
(restic-restore!)

View file

@ -6,7 +6,7 @@ from ddadevops import *
name = "ddadevops"
MODULE = "clj-cljs"
PROJECT_ROOT_PATH = "../.."
version = "4.7.0"
version = "4.13.2-dev"
@init
def initialize(project):

View file

@ -1 +0,0 @@
478604fe85c711aafe8ef78c0bf25cb93fa46de5a3c07040f25a595096c43f8a kubeconform-v0.6.3.tar.gz

View file

@ -1,32 +1,45 @@
#!/bin/bash
set -eux
set -exo pipefail
function main() {
{
upgradeSystem
mkdir -p /usr/share/man/man1
apt -qqy install openjdk-17-jre-headless leiningen curl
apt-get -qqy install curl openjdk-17-jre-headless leiningen
# shadow-cljs
npm install -g npm
npm install -g --save-dev shadow-cljs
# download kubeconform & graalvm
curl -Lo /tmp/kubeconform-v0.6.3.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.6.3/kubeconform-linux-amd64.tar.gz
# checksum
cd /tmp
sha256sum --check CHECKSUMS
kubeconform_version="0.6.4"
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
# checksum kubeconform
checksum
# install kubeconform
tar -xf /tmp/kubeconform-v0.6.3.tar.gz
cp kubeconform /usr/local/bin
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
#install pyb
apt -qqy install python3 python3-pip git;
pip3 install pybuilder 'ddadevops>=4.2.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages;
apt-get -qqy install python3 python3-pip git
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
#check
lein --help
cleanupDocker
} > /dev/null
}
source /tmp/install_functions.sh
main
function checksum() {
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
cat /tmp/kubeconform-checksum
sha256sum -c --status /tmp/kubeconform-checksum
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

View file

@ -6,7 +6,7 @@ from ddadevops import *
name = "ddadevops"
MODULE = "clj"
PROJECT_ROOT_PATH = "../.."
version = "4.7.0"
version = "4.13.2-dev"
@init
def initialize(project):

View file

@ -3,4 +3,4 @@ FROM debian:stable-slim
ADD resources /tmp
RUN /tmp/install.sh
ENV LANG=en_US.UTF-8 \
JAVA_HOME=/usr/lib64/graalvm/graalvm-community-java17
JAVA_HOME=/usr/lib/jvm/graalvm

View file

@ -1,2 +0,0 @@
478604fe85c711aafe8ef78c0bf25cb93fa46de5a3c07040f25a595096c43f8a kubeconform-v0.6.3.tar.gz
094e5a7dcc4a903b70741d5c3c1688f83e83e2d44eb3d8d798c5d79ed902032c graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz

View file

@ -1,38 +1,57 @@
#!/bin/bash
set -eux
set -exo pipefail
function main() {
{
upgradeSystem
apt -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev;
apt-get -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev
# download kubeconform & graalvm
curl -Lo /tmp/kubeconform-v0.6.3.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.6.3/kubeconform-linux-amd64.tar.gz
curl -Lo /tmp/graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-17.0.7/graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz
# checksum
cd /tmp
sha256sum --check CHECKSUMS
kubeconform_version="0.6.4"
graalvm_jdk_version="21.0.2"
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
curl -SsLo /tmp/graalvm-community-jdk.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz
curl -SsLo /tmp/graalvm-checksum https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz.sha256
# checksum kubeconform & graalvm-jdk
checksum
# install kubeconform
tar -xf /tmp/kubeconform-v0.6.3.tar.gz
cp kubeconform /usr/local/bin
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
# install graalvm
tar -xzf graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz
mv graalvm-community-openjdk-17.0.7+7.1 /usr/lib/jvm/
ln -s /usr/lib/jvm/graalvm-community-openjdk-17.0.7+7.1 /usr/lib/jvm/graalvm
tar -C /usr/lib/jvm/ -xf /tmp/graalvm-community-jdk.tar.gz
dirname_graalvm=$(ls /usr/lib/jvm/|grep -e graa)
ln -s /usr/lib/jvm/$dirname_graalvm /usr/lib/jvm/graalvm
ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
gu install native-image
ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
#install pyb
apt -qqy install python3 python3-pip;
pip3 install pybuilder 'ddadevops>=4.2.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages;
apt-get -qqy install python3 python3-pip
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
#check
native-image --version
lein -v
cleanupDocker
} > /dev/null
}
source /tmp/install_functions.sh
main
function checksum() {
#kubeconform
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
sha256sum -c --status /tmp/kubeconform-checksum
#graalvm
echo " /tmp/graalvm-community-jdk.tar.gz"|tee -a /tmp/graalvm-checksum
sha256sum -c --status /tmp/graalvm-checksum
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

View file

@ -6,7 +6,7 @@ from ddadevops import *
name = "ddadevops"
MODULE = "ddadevops"
PROJECT_ROOT_PATH = "../.."
version = "4.7.0"
version = "4.13.2-dev"
@init
@ -26,7 +26,7 @@ def initialize(project):
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.0.0")
project.build_depends_on("ddadevops>=4.9.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()

View file

@ -1,7 +1,5 @@
FROM python:3.10-alpine
RUN set -eux;
RUN apk add --no-cache python3 py3-pip openssl-dev bash git curl;
RUN python3 -m pip install -U pip;
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection;
RUN pip3 install --upgrade ddadevops --pre
ADD resources /tmp
RUN /tmp/install.sh

View file

@ -0,0 +1,19 @@
#!/bin/sh
set -exo pipefail
function main() {
{
upgradeSystem
apk add --no-cache python3 py3-pip openssl-dev bash git curl
python3 -m pip install -U pip
pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection
cleanupDocker
} > /dev/null
}
source /tmp/install_functions_alpine.sh
main

View file

@ -6,7 +6,7 @@ from ddadevops import *
name = "ddadevops"
MODULE = "dind"
PROJECT_ROOT_PATH = "../.."
version = "4.7.0"
version = "4.13.2-dev"
@init
@ -26,7 +26,7 @@ def initialize(project):
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.0.0")
project.build_depends_on("ddadevops>=4.7.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()

View file

@ -1,6 +1,5 @@
FROM docker:latest
RUN set -eux;
RUN apk add --no-cache python3 py3-pip openssl-dev bash git;
RUN python3 -m pip install -U pip;
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection;
WORKDIR /tmp
ADD resources ./
RUN ./install.sh

View file

@ -0,0 +1,17 @@
#!/bin/sh
set -exo pipefail
function main() {
{
upgradeSystem
apk add --no-cache python3 py3-pip openssl-dev bash git
pip3 install --break-system-packages pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection
cleanupDocker
} > /dev/null
}
source /tmp/install_functions_alpine.sh
main

View file

@ -0,0 +1,57 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
name = "ddadevops"
MODULE = "kotlin"
PROJECT_ROOT_PATH = "../.."
version = "4.13.2-dev"
@init
def initialize(project):
image_tag = version
if "dev" in image_tag:
image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
"image_naming": "NAME_AND_MODULE",
"image_tag": f"{image_tag}",
}
project.build_depends_on("ddadevops>=4.0.0")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()
@task
def image(project):
build = get_devops_build(project)
build.image()
@task
def drun(project):
build = get_devops_build(project)
build.drun()
@task
def test(project):
build = get_devops_build(project)
build.test()
@task
def publish(project):
build = get_devops_build(project)
build.dockerhub_login()
build.dockerhub_publish()

View file

@ -0,0 +1,4 @@
FROM debian:stable-slim
ADD resources /tmp
RUN /tmp/install.sh

View file

@ -0,0 +1,17 @@
#!/bin/bash
set -exo pipefail
function main() {
{
upgradeSystem
apt-get -qqy install curl git kotlin gradle iputils-ping ssh python3 python3-pip
pip3 install --break-system-packages pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection
cleanupDocker
} > /dev/null
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

View file

@ -6,7 +6,7 @@ from ddadevops import *
name = "ddadevops"
MODULE = "python"
PROJECT_ROOT_PATH = "../.."
version = "4.7.0"
version = "4.13.2-dev"
@init

View file

@ -1,8 +1,4 @@
FROM python:3.10-alpine
RUN set -eux;
RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl;
RUN python3 -m pip install -U pip;
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection;
RUN pip3 install coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML;
RUN pip3 install --upgrade ddadevops --pre
ADD resources /tmp
RUN /tmp/install.sh

View file

@ -0,0 +1,20 @@
#!/bin/sh
set -exo pipefail
function main() {
{
upgradeSystem
apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl
python3 -m pip install -U pip
pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection \
coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML
cleanupDocker
} > /dev/null
}
source /tmp/install_functions_alpine.sh
main

View file

@ -31,6 +31,12 @@ class ImageBuildService:
self.__copy_build_resource_file_from_package__(
"image/resources/install_functions.sh", devops
)
self.__copy_build_resource_file_from_package__(
"image/resources/install_functions_debian.sh", devops
)
self.__copy_build_resource_file_from_package__(
"image/resources/install_functions_alpine.sh", devops
)
def __copy_build_resources_from_dir__(self, devops: Devops):
image = devops.specialized_builds[BuildType.IMAGE]
@ -45,7 +51,10 @@ class ImageBuildService:
else:
self.__copy_build_resources_from_dir__(devops)
self.file_api.cp_recursive("image", build_path)
self.file_api.cp_recursive("test", build_path)
try:
self.file_api.cp_recursive("test", build_path)
except:
print("Folder 'test' not found")
def image(self, devops: Devops):
image = devops.specialized_builds[BuildType.IMAGE]

View file

@ -53,7 +53,8 @@ class ReleaseService:
bump_version = release_version.create_bump()
release_message = f"release: {release_version.to_string()}"
bump_message = f"bump version to: {bump_version.to_string()}"
self.git_api.tag_annotated(release_version.to_string(), release_message, 0)
release_tag = f"{release.release_tag_prefix}{release_version.to_string()}"
self.git_api.tag_annotated(release_tag, release_message, 0)
self.__set_version_and_commit__(
bump_version,
release.build_files(),
@ -62,11 +63,12 @@ class ReleaseService:
self.git_api.push_follow_tags()
def publish_artifacts(self, release: Release):
token = str(release.release_artifact_token)
release_id = self.__parse_forgejo_release_id__(
self.artifact_deployment_api.create_forgejo_release(
release.forgejo_release_api_endpoint(),
release.version.to_string(),
str(release.release_artifact_token),
token,
)
)
@ -84,7 +86,7 @@ class ReleaseService:
release.forgejo_release_asset_api_endpoint(release_id),
artifact.path(),
artifact.type(),
str(release.release_artifact_token),
token,
)
def __parse_forgejo_release_id__(self, release_response: str) -> int:

View file

@ -11,6 +11,7 @@ class BuildFileType(Enum):
JS = ".json"
JAVA_GRADLE = ".gradle"
JAVA_CLOJURE = ".clj"
JAVA_CLOJURE_EDN = ".edn"
PYTHON = ".py"
@ -41,40 +42,40 @@ class BuildFile(Validateable):
result = BuildFileType.JAVA_CLOJURE
case ".py":
result = BuildFileType.PYTHON
case ".edn":
result = BuildFileType.JAVA_CLOJURE_EDN
case _:
result = None
return result
def __get_file_type_regex_str(self, file_type: BuildFileType):
match file_type:
case BuildFileType.JAVA_GRADLE:
return r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
case BuildFileType.PYTHON:
return r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT|-dev\d*)?)\""
case BuildFileType.JAVA_CLOJURE:
return r"(?P<pre_version>\(defproject\s(\S)*\s)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
case BuildFileType.JAVA_CLOJURE_EDN:
return r"(?P<pre_version>\:version\s+)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
case _:
return ""
def get_version(self) -> Version:
try:
match self.build_file_type():
build_file_type = self.build_file_type()
match build_file_type:
case BuildFileType.JS:
version_str = json.loads(self.content)["version"]
case BuildFileType.JAVA_GRADLE:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
case BuildFileType.PYTHON:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*",
version_line_group,
)
version_str = version_string.group()
case BuildFileType.JAVA_CLOJURE:
# TODO: unsure about the trailing '\n' !
version_line = re.search("\\(defproject .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
case (
BuildFileType.JAVA_GRADLE
| BuildFileType.PYTHON
| BuildFileType.JAVA_CLOJURE
| BuildFileType.JAVA_CLOJURE_EDN
):
version_str = re.search(
self.__get_file_type_regex_str(build_file_type), self.content
).group("version")
except:
raise RuntimeError(f"Version not found in file {self.file_path}")
@@ -84,32 +85,26 @@ class BuildFile(Validateable):
return result
def set_version(self, new_version: Version):
# TODO: How can we create regex-pattern constants to use them at both places?
if new_version.is_snapshot():
new_version.snapshot_suffix = self.get_default_suffix()
try:
match self.build_file_type():
build_file_type = self.build_file_type()
match build_file_type:
case BuildFileType.JS:
json_data = json.loads(self.content)
json_data["version"] = new_version.to_string()
self.content = json.dumps(json_data, indent=4)
case BuildFileType.JAVA_GRADLE:
case (
BuildFileType.JAVA_GRADLE
| BuildFileType.PYTHON
| BuildFileType.JAVA_CLOJURE
| BuildFileType.JAVA_CLOJURE_EDN
):
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.PYTHON:
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.JAVA_CLOJURE:
# TODO: we should stick here on defproject instead of first line!
substitute = re.sub(
'"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'"{new_version.to_string()}"',
self.__get_file_type_regex_str(build_file_type),
rf'\g<pre_version>"{new_version.to_string()}"',
self.content,
1,
)

View file
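Version reading and writing for Gradle, Python, Leiningen and the new deps.edn files is now driven by one named-group pattern per file type: get_version returns the version group, and set_version substitutes the same pattern with a \g<pre_version> back-reference so only the quoted version changes. A minimal sketch of that mechanism for the edn case, using nothing beyond Python's re module:

import re

EDN_PATTERN = r"(?P<pre_version>\:version\s+)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
content = '{:project {:name org.domaindrivenarchitecture/dda-backup\n            :version "1.1.5-SNAPSHOT"}}'

current = re.search(EDN_PATTERN, content).group("version")                    # -> "1.1.5-SNAPSHOT"
updated = re.sub(EDN_PATTERN, r'\g<pre_version>"2.0.0"', content, count=1)   # rewrites only the quoted version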

@@ -78,6 +78,12 @@ class DnsRecord(Validateable):
result.append("ipv4 & ipv6 may not both be empty.")
return result
def ip(self) -> str:
if (self.ipv4):
return self.ipv4
else:
return self.ipv6
class Devops(Validateable):
def __init__(

View file
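DnsRecord.ip() prefers the IPv4 address and falls back to IPv6; the provisioning command further down now targets this address instead of the fqdn. A tiny usage sketch, with the import path and constructor keywords assumed for illustration:

from src.main.python.ddadevops.domain import DnsRecord   # assumed location of the domain class

record = DnsRecord(fqdn="example.org", ipv4=None, ipv6="::1")   # constructor keywords assumed
record.ip()   # -> "::1"; the IPv4 address would be returned whenever one is set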

@@ -20,6 +20,14 @@ CONFIG_CERTMANAGER = """certmanager:
"""
CONFIG_ECHO = """echo: $echo
"""
CONFIG_HETZNER_CSI = """hetzner:
hcloudApiToken:
source: "PLAIN" # PLAIN, GOPASS or PROMPT
parameter: $hcloud_api # the api key for the hetzner cloud
encryptionPassphrase:
source: "PLAIN" # PLAIN, GOPASS or PROMPT
parameter: $encryption # the encryption passphrase for created volumes
"""
class K3s(Validateable):
@@ -28,8 +36,11 @@ class K3s(Validateable):
self.k3s_letsencrypt_email = inp.get("k3s_letsencrypt_email")
self.k3s_letsencrypt_endpoint = inp.get("k3s_letsencrypt_endpoint", "staging")
self.k3s_app_filename_to_provision = inp.get("k3s_app_filename_to_provision")
self.k3s_enable_echo = inp.get("k3s_enable_echo", "false")
self.k3s_enable_echo = inp.get("k3s_enable_echo", None)
self.k3s_provs_template = inp.get("k3s_provs_template", None)
self.k3s_enable_hetzner_csi = inp.get("k3s_enable_hetzner_csi", False)
self.k3s_hetzner_api_token = inp.get("k3s_hetzner_api_token", None)
self.k3s_hetzner_encryption_passphrase = inp.get("k3s_hetzner_encryption_passphrase", None)
self.provision_dns: Optional[DnsRecord] = None
def validate(self) -> List[str]:
@@ -37,6 +48,9 @@ class K3s(Validateable):
result += self.__validate_is_not_empty__("k3s_letsencrypt_email")
result += self.__validate_is_not_empty__("k3s_letsencrypt_endpoint")
result += self.__validate_is_not_empty__("k3s_app_filename_to_provision")
if self.k3s_enable_hetzner_csi:
result += self.__validate_is_not_empty__("k3s_hetzner_api_token")
result += self.__validate_is_not_empty__("k3s_hetzner_encryption_passphrase")
if self.provision_dns:
result += self.provision_dns.validate()
return result
@@ -61,6 +75,9 @@ class K3s(Validateable):
substitutes["letsencrypt_endpoint"] = self.k3s_letsencrypt_endpoint
if self.k3s_enable_echo is not None:
substitutes["echo"] = self.k3s_enable_echo
if self.k3s_enable_hetzner_csi:
substitutes["hcloud_api"] = self.k3s_hetzner_api_token
substitutes["encryption"] = self.k3s_hetzner_encryption_passphrase
return self.__config_template__().substitute(substitutes)
def command(self, devops: Devops):
@@ -69,7 +86,7 @@ class K3s(Validateable):
cmd = [
"provs-server.jar",
"k3s",
f"{self.k3s_provision_user}@{self.provision_dns.fqdn}",
f"{self.k3s_provision_user}@{self.provision_dns.ip()}",
"-c",
f"{devops.build_path()}/out_k3sServerConfig.yaml",
"-a",
@@ -89,4 +106,6 @@ class K3s(Validateable):
template_text += CONFIG_IPV4
if self.provision_dns.ipv6 is not None:
template_text += CONFIG_IPV6
if self.k3s_enable_hetzner_csi:
template_text += CONFIG_HETZNER_CSI
return Template(template_text)

View file
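When k3s_enable_hetzner_csi is set, the CONFIG_HETZNER_CSI fragment is appended to the server config template and its $hcloud_api and $encryption placeholders are filled from the two new config keys. A minimal sketch of that substitution step, using only string.Template and an abbreviated copy of the fragment:

from string import Template

CONFIG_HETZNER_CSI = """hetzner:
  hcloudApiToken:
    source: "PLAIN"
    parameter: $hcloud_api
  encryptionPassphrase:
    source: "PLAIN"
    parameter: $encryption
"""

print(Template(CONFIG_HETZNER_CSI).substitute(
    {"hcloud_api": "hcloud-api-token", "encryption": "volume-passphrase"}
))
# prints the hetzner block with both PLAIN-sourced parameters filled in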

@@ -24,6 +24,7 @@ class Release(Validateable):
"release_secondary_build_files", []
)
self.version = version
self.release_tag_prefix = inp.get("release_tag_prefix", "")
self.release_artifact_server_url = inp.get("release_artifact_server_url")
self.release_organisation = inp.get("release_organisation")
self.release_repository_name = inp.get("release_repository_name")

View file

@@ -58,10 +58,8 @@ class ImageApi:
)
def drun(self, name: str):
run(
f'docker run -it --entrypoint="" {name} /bin/bash',
shell=True,
check=True,
self.execution_api.execute_live(
f'docker run -it {name} /bin/bash'
)
def dockerhub_login(self, username: str, password: str):
@@ -123,13 +121,12 @@ class ExecutionApi:
if dry_run:
print(command)
else:
process = Popen(command, stdout=PIPE, shell=shell)
for line in iter(process.stdout.readline, b""):
print(line.decode("utf-8"), end="")
process.stdout.close()
return_code = process.wait()
if return_code != 0:
raise RuntimeError(f"Execute live failed with code: {return_code}")
process = Popen(command, shell=shell)
outs, errs = process.communicate()
while outs is not None:
stdout.buffer.write(outs)
if process.returncode != 0:
raise RuntimeError(f"Execute live '{command}' failed with code {process.returncode}\nerrs: {errs}")
class EnvironmentApi:
@@ -224,14 +221,15 @@ class ArtifactDeploymentApi:
self.execution_api = ExecutionApi()
def create_forgejo_release(self, api_endpoint_url: str, tag: str, token: str):
sanitized_command = (
command = (
f'curl -X "POST" "{api_endpoint_url}" '
+ '-H "accept: application/json" -H "Content-Type: application/json" '
+ f'-d \'{{ "body": "Provides files for release {tag}", "tag_name": "{tag}"}}\' '
+ ' -H "accept: application/json" -H "Content-Type: application/json"'
+ f' -d \'{{ "body": "Provides files for release {tag}", "tag_name": "{tag}"}}\''
) # noqa: E501
command = sanitized_command + f'-H "Authorization: token {token}"'
print(command + ' -H "Authorization: token xxxx"')
return self.execution_api.execute_secure(
command=command, sanitized_command=sanitized_command
command=command + f' -H "Authorization: token {token}"',
sanitized_command=command + ' -H "Authorization: token xxxx"',
)
def add_asset_to_release(
@@ -241,15 +239,16 @@ class ArtifactDeploymentApi:
attachment_type: str,
token: str,
):
command = (
f'curl -X "POST" "{api_endpoint_url}"'
+ ' -H "accept: application/json"'
+ ' -H "Content-Type: multipart/form-data"'
+ f' -F "attachment=@{attachment};type={attachment_type}"'
) # noqa: E501
print(command + ' -H "Authorization: token xxxx"')
return self.execution_api.execute_secure(
f'curl -X "POST" "{api_endpoint_url}" '
+ f'-H "accept: application/json" -H "Authorization: token {token}" '
+ '-H "Content-Type: multipart/form-data" '
+ f'-F "attachment=@{attachment};type={attachment_type}"',
sanitized_command=f'curl -X "POST" "{api_endpoint_url}" '
+ '-H "accept: application/json" '
+ '-H "Content-Type: multipart/form-data" '
+ f'-F "attachment=@{attachment};type={attachment_type}"',
command=command + f' -H "Authorization: token {token}"',
sanitized_command=command + ' -H "Authorization: token xxxx"',
)
def calculate_sha256(self, path: Path):

View file
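Both Forgejo curl invocations now build the command once without the Authorization header and hand execute_secure a sanitized twin in which the token is replaced by "xxxx", so the secret never reaches the build log. A minimal sketch of that pattern, with run_logged as a hypothetical stand-in for execute_secure and the endpoint taken from the tests below:

def run_logged(command: str, sanitized_command: str):
    # hypothetical stand-in: log only the sanitized form
    print(sanitized_command)
    # the real implementation would execute `command`, e.g. via subprocess.run(command, shell=True, check=True)

token = "secret-token"
command = 'curl -X "POST" "http://repo.test/api/v1/repos/orga/repo/releases"'
run_logged(
    command=command + f' -H "Authorization: token {token}"',
    sanitized_command=command + ' -H "Authorization: token xxxx"',
)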

@@ -1,8 +1,11 @@
#
# deprecated: we recommend using install_functions_debian.sh instead. We are going to remove install_functions.sh in a future release.
#
function upgradeSystem() {
export DEBIAN_FRONTEND=noninteractive
apt-get update > /dev/null
apt-get -y install apt-utils > /dev/null
apt-get -qqy dist-upgrade > /dev/null
{
apt-get update
apt-get -qqy upgrade
} > /dev/null
}
function cleanupDocker() {

View file

@@ -0,0 +1,21 @@
function upgradeSystem() {
apk -U upgrade
}
function cleanupDocker() {
rm -f /root/.ssh/authorized_keys
rm -f /root/.ssh/authorized_keys2
apk cache clean
rm -rf /tmp/*
find /var/cache -type f -exec rm -rf {} \;
find /var/log/ -name '*.log' -exec rm -f {} \;
}
function cleanupAmi() {
rm -f /home/ubuntu/.ssh/authorized_keys
rm -f /home/ubuntu/.ssh/authorized_keys2
cleanupDocker
}

View file

@@ -0,0 +1,25 @@
function upgradeSystem() {
apt-get update
apt-get -qqy upgrade
}
function cleanupDocker() {
rm -f /root/.ssh/authorized_keys
rm -f /root/.ssh/authorized_keys2
apt-get clean
apt-get -qqy autoremove --purge
apt-get -qqy autoclean
rm -rf /var/lib/apt/lists/
rm -rf /tmp/*
find /var/cache -type f -exec rm -rf {} \;
find /var/log/ -name '*.log' -exec rm -f {} \;
}
function cleanupAmi() {
rm -f /home/ubuntu/.ssh/authorized_keys
rm -f /home/ubuntu/.ssh/authorized_keys2
cleanupDocker
}

View file

@@ -13,7 +13,7 @@ from src.test.python.domain.helper import (
from src.main.python.ddadevops.application import ReleaseService
def test_sould_update_release_type():
def test_should_update_release_type():
sut = ReleaseService(
GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepositoryMock("build.py")
)
@@ -26,7 +26,7 @@ def test_sould_update_release_type():
sut.update_release_type(release, "NOT_EXISTING")
def test_sould_publish_artifacts():
def test_should_publish_artifacts():
mock = ArtifactDeploymentApiMock(release='{"id": 2345}')
sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
devops = build_devops(
@@ -41,7 +41,7 @@ def test_sould_publish_artifacts():
sut.publish_artifacts(release)
assert "http://repo.test/api/v1/repos/orga/repo/releases/2345/assets" == mock.add_asset_to_release_api_endpoint
def test_sould_throw_exception_if_there_was_an_error_in_publish_artifacts():
def test_should_throw_exception_if_there_was_an_error_in_publish_artifacts():
devops = build_devops(
{
"release_artifacts": ["target/art"],

View file
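The publish test above feeds the mock a create-release response of '{"id": 2345}' and then expects the asset upload to hit .../releases/2345/assets, i.e. the numeric id is parsed out of the JSON response. A minimal sketch of such a parser, offered as an assumption about what __parse_forgejo_release_id__ does:

import json

def parse_forgejo_release_id(release_response: str) -> int:
    # the create-release call answers with JSON; its "id" addresses the asset endpoint
    return int(json.loads(release_response)["id"])

parse_forgejo_release_id('{"id": 2345}')   # -> 2345, matching the asset URL asserted above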

@@ -156,6 +156,9 @@ class GitApiMock:
def push(self):
pass
def push_follow_tags(self):
pass
def checkout(self, branch: str):
pass

View file

@@ -13,7 +13,7 @@ from src.main.python.ddadevops.domain import (
from .helper import build_devops, devops_config
def test_sould_validate_release():
def test_should_validate_release():
sut = Artifact("x")
assert sut.is_valid()

View file

@@ -7,7 +7,7 @@ from src.main.python.ddadevops.domain import (
)
def test_sould_validate_build_file():
def test_should_validate_build_file():
sut = BuildFile(Path("./project.clj"), "content")
assert sut.is_valid()
@@ -18,7 +18,7 @@ def test_sould_validate_build_file():
assert not sut.is_valid()
def test_sould_calculate_build_type():
def test_should_calculate_build_type():
sut = BuildFile(Path("./project.clj"), "content")
assert sut.build_file_type() == BuildFileType.JAVA_CLOJURE
@@ -29,7 +29,7 @@ def test_sould_calculate_build_type():
assert sut.build_file_type() == BuildFileType.JS
def test_sould_parse_and_set_js():
def test_should_parse_and_set_js():
sut = BuildFile(
Path("./package.json"),
"""
@@ -77,7 +77,7 @@ def test_sould_parse_and_set_js():
)
def test_sould_parse_and_set_version_for_gradle():
def test_should_parse_and_set_version_for_gradle():
sut = BuildFile(
Path("./build.gradle"),
"""
@@ -97,7 +97,7 @@ version = "1.1.5-SNAPSHOT"
assert '\nversion = "2.0.0"\n' == sut.content
def test_sould_parse_and_set_version_for_py():
def test_should_parse_and_set_version_for_py():
sut = BuildFile(
Path("./build.py"),
"""
@@ -143,7 +143,7 @@ version = "1.1.5-SNAPSHOT"
assert '\nversion = "2.0.0"\n' == sut.content
def test_sould_parse_and_set_version_for_clj():
def test_should_parse_and_set_version_for_clj():
sut = BuildFile(
Path("./project.clj"),
"""
@@ -182,3 +182,71 @@ def test_sould_parse_and_set_version_for_clj():
'\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n:dependencies [[org.clojure/clojure "1.11.0"]]\n)\n '
== sut.content
)
def test_should_parse_and_set_version_for_clj_edn():
sut = BuildFile(
Path("./deps.edn"),
"""
{:project {:name org.domaindrivenarchitecture/dda-backup
:version "1.1.5-SNAPSHOT"}
}
""",
)
assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
sut = BuildFile(
Path("./deps.edn"),
"""
{:project {:name org.domaindrivenarchitecture/dda-backup
:version "1.1.5-SNAPSHOT"}
}
""",
)
sut.set_version(Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT").create_major())
assert (
'\n{:project {:name org.domaindrivenarchitecture/dda-backup\n :version "2.0.0"}\n\n}\n'
== sut.content
)
def test_should_throw_for_clj_wrong_version():
sut = BuildFile(
Path("./project.clj"),
"""
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-Snapshot"
:description "jira c4k-installation package"
:url "https://domaindrivenarchitecture.org"
)
""",
)
with pytest.raises(RuntimeError):
sut.get_version()
def test_should_ignore_first_version_for_py():
sut = BuildFile(
Path("./build.py"),
"""
from pybuilder.core import init, use_plugin, Author
use_plugin("python.core")
name = "ddadevops"
project_version = "0.0.2-dev1"
version = "1.1.5-dev12"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
""",
)
assert sut.get_version() == Version.from_str("1.1.5-dev12", "dev")
def test_should_ignore_first_version_for_gradle():
sut = BuildFile(
Path("./build.gradle"),
"""
kotlin_version = "3.3.3"
version = "1.1.5-SNAPSHOT"
""",
)
assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")

View file

@@ -24,7 +24,7 @@ def test_should_calculate_command():
assert (
"provs-server.jar "
+ "k3s "
+ "k3s_provision_user@example.org "
+ "k3s_provision_user@::1 "
+ "-c "
+ "root_path/target/name/module/out_k3sServerConfig.yaml "
+ "-a "

View file

@@ -15,7 +15,7 @@ from src.main.python.ddadevops.domain import (
from .helper import build_devops, devops_config
def test_sould_validate_release():
def test_should_validate_release():
sut = Release(
devops_config(
{
@@ -49,7 +49,7 @@ def test_sould_validate_release():
assert not sut.is_valid()
def test_sould_calculate_build_files():
def test_should_calculate_build_files():
sut = Release(
devops_config(
{

View file

@@ -5,7 +5,9 @@ from pybuilder.core import Project
from src.main.python.ddadevops.release_mixin import ReleaseMixin
from src.main.python.ddadevops.domain import Devops, Release
from .domain.helper import devops_config
from src.main.python.ddadevops.application import ReleaseService
from src.main.python.ddadevops.infrastructure import BuildFileRepository
from .domain.helper import devops_config, GitApiMock, ArtifactDeploymentApiMock
from .resource_helper import copy_resource
@@ -30,3 +32,37 @@ def test_release_mixin(tmp_path):
sut.initialize_build_dir()
assert sut.build_path() == f"{str_tmp_path}/target/name/release-test"
def test_release_mixin_different_version_suffixes(tmp_path):
str_tmp_path = str(tmp_path)
copy_resource(Path("config.py"), tmp_path)
copy_resource(Path("config.gradle"), tmp_path)
project = Project(str_tmp_path, name="name")
os.environ["RELEASE_ARTIFACT_TOKEN"] = "ratoken"
sut = ReleaseMixin(
project,
devops_config(
{
"project_root_path": str_tmp_path,
"mixin_types": ["RELEASE"],
"build_types": [],
"module": "release-test",
"release_current_branch": "main",
"release_main_branch": "main",
"release_primary_build_file": "config.py",
"release_secondary_build_files": ["config.gradle"],
}
),
)
sut.release_service = ReleaseService(GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepository(project.basedir))
sut.initialize_build_dir()
sut.update_release_type("PATCH")
sut.prepare_release()
sut.tag_bump_and_push_release()
assert sut.release_service.build_file_repository.get(Path("config.py")).get_version().to_string() == "3.1.5-dev"
assert sut.release_service.build_file_repository.get(Path("config.gradle")).get_version().to_string() == "3.1.5-SNAPSHOT"