Compare commits

129 commits (SHA1):

3bc72f5bb8, e9fdfdf520, 7fa2a8056d, dfb46d76a5, 39c6b95af8, 0935eae193, 58a1b005e9, bba684d76e,
ec150dde62, 76d4ad16dc, 126ae37845, e9f1915655, 7448333d7c, e8555798e4, cf4d1e450c, a661eaf3ca,
f4da27f63f, e764534487, 6093d160e8, 4d8dc95d8e, e6f39eab21, 0cb4bc43f9, 5b5bc0ab96, b139865f89,
e4fa06fc42, 4fa849b72b, 48bbbe6f6e, bf843edb80, 5e8c21c521, 3bc3a0cd7e, 56bc215f26, 678b75ae6f,
d133b281f9, 581449fba4, b38876d9ef, 90c4d4ec9d, c0daa85612, 0bdd13cf8a, 1bba35963a, 2b7fe54f76,
e96581754c, 4034b0022b, 87cc56cdd7, a9d98a6d0c, 671a3b8cbb, 011fc848af, c5af5c9198, afec1fdd0c,
6513e00e54, 3cc6206d06, 48452baac5, 5875642777, 58d8d46a0c, 6e57204ce5, d3f1204932, 305c9a6bd0,
751bc26a21, 202c07150c, f5c75a31f5, 7c24477348, 237691f79f, c6c8de9b0f, a7a00756bc, e822e3d0f0,
bbd12cda4e, adcf93d321, c9df6082ae, 5825bcbf47, b8f7b72a61, f4d30706b7, 72cd6a65d2, fde5061bf3,
0dcb375e15, c6872458e2, a5a653b213, f12d43b9bc, 5f18c7ddb3, c92923f6b9, 2aec5b44d2, 52622b1f2d,
822bebc26f, 0ba623c560, 147cb1bd72, 56dc19322a, ee20925e04, 2455ea0ff0, 739940cdd3, 47dc81860a,
308c86595c, 45745f3ebd, 5b5249eb55, d2a3e60edf, 8324727b8a, c79c0c1a7f, d5c9bae212, 2914be8a88,
bfc55293dc, 9a0573bc33, 701047fd4f, 9638450ef6, d90f68ce1a, f3f51b165a, 29d439e82a, 4d8961557c,
3e49e31e66, 5053b79ad4, 215a9bf0fe, 97978e9950, 7375ba16cc, 07cf837ac6, 6399a1dfeb, 36df9f486b,
617d909438, bcccfd8b9c, f7897a574d, c6217bd0a2, e5d1203435, 78824ea38b, 288247be8e, bc06c34ea3,
03c4229cf0, 32ae4f2a6f, 3bbae609d7, b8f6129146, ef2efc40f7, b3a612c938, edd2ae5743, 00202edecc,
31ac285df0
61 changed files with 1076 additions and 168 deletions
3  .gitignore (vendored)

@@ -109,3 +109,6 @@ venv.bak/
 
 .clj-kondo/
 .lsp/
+.calva/
+.cpcache/
+infrastructure/backup/image/resources/backup-repository-state.edn
@@ -4,14 +4,14 @@ stages:
   - image
 
 .py: &py
-  image: "domaindrivenarchitecture/ddadevops-python:4.7.0"
+  image: "domaindrivenarchitecture/ddadevops-python:4.10.7"
   before_script:
     - export RELEASE_ARTIFACT_TOKEN=$MEISSA_REPO_BUERO_RW
     - python --version
     - pip install -r requirements.txt
 
 .img: &img
-  image: "domaindrivenarchitecture/ddadevops-dind:4.7.0"
+  image: "domaindrivenarchitecture/ddadevops-dind:4.10.7"
   services:
     - docker:dind
   before_script:

@@ -80,3 +80,10 @@ ddadevops-image-publish:
   stage: image
   script:
     - cd infrastructure/ddadevops && pyb image publish
+
+kotlin-image-publish:
+  <<: *img
+  <<: *tag_only
+  stage: image
+  script:
+    - cd infrastructure/kotlin && pyb image publish
32  README.md

@@ -1,8 +1,7 @@
 # dda-devops-build
 
-[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://meissa-gmbh.de/img/community/Mastodon_Logotype.svg" width=20 alt="team@social.meissa-gmbh.de"> team@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@team) | [Website & Blog](https://domaindrivenarchitecture.org)
-
-![release prod](https://github.com/DomainDrivenArchitecture/dda-devops-build/workflows/release%20prod/badge.svg)
+[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://domaindrivenarchitecture.org/img/delta-chat.svg" width=20 alt="DeltaChat"> chat over e-mail](mailto:buero@meissa-gmbh.de?subject=community-chat) | [<img src="https://meissa.de/images/parts/contact/mastodon36_hue9b2464f10b18e134322af482b9c915e_5501_filter_14705073121015236177.png" width=20 alt="M"> meissa@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@meissa) | [Blog](https://domaindrivenarchitecture.org) | [Website](https://meissa.de)
 
 dda-devops-build integrates all the tools we use to work with clouds & provide some nice functions around.
 

@@ -84,6 +83,10 @@ Principles we follow are:
 * Seperate build artefacts from version controlled code
 * Domain Driven Design - in order to stay sustainable
 
+## Example Project
+
+An example project which is using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
+
 ## Installation
 
 Ensure that yout python3 version is at least Python 3.10

@@ -94,17 +97,9 @@ pip3 install -r requirements.txt
 export PATH=$PATH:~/.local/bin
 ```
 
-## Reference
-
-* [DevopsBuild](./doc/DevopsBuild.md)
-* [DevopsImageBuild](./doc/DevopsImageBuild.md)
-* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
-* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
-* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
-* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
-* [ReleaseMixin](./doc/ReleaseMixin.md)
-* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
-* [C4kBuild](doc/C4kBuild.md)
+## Example Project
+
+An example project which is using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
 
 ## Example Build
 

@@ -190,6 +185,19 @@ pyb [patch|minor|major]
 pip3 install --upgrade ddadevops
 ```
 
+## Reference
+
+* [DevopsBuild](./doc/DevopsBuild.md)
+* [DevopsImageBuild](./doc/DevopsImageBuild.md)
+* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
+* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
+* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
+* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
+* [ReleaseMixin](./doc/ReleaseMixin.md)
+* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
+* [C4kBuild](doc/C4kBuild.md)
+
+
 ## Development & mirrors
 
 Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
11  build.py

@@ -33,7 +33,7 @@ default_task = "dev"
 name = "ddadevops"
 MODULE = "not-used"
 PROJECT_ROOT_PATH = "."
-version = "4.7.4"
+version = "4.13.2-dev"
 summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
 description = __doc__
 authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]

@@ -102,6 +102,7 @@ def initialize(project):
             "infrastructure/ddadevops/build.py",
             "infrastructure/clj-cljs/build.py",
             "infrastructure/clj/build.py",
+            "infrastructure/kotlin/build.py",
         ],
         "release_artifacts": [],
         "release_artifact_server_url": "https://repo.prod.meissa.de",

@@ -139,13 +140,7 @@ def lint(project):
         shell=True,
         check=True,
     )
-    run(
-        "pylint -d W0511,R0903,C0301,W0614,C0114,C0115,C0116,similarities,W1203,W0702,W0702,"
-        + "R0913,R0902,R0914,R1732,R1705,W0707,C0123,W0703,C0103 src/main/python/ddadevops/",
-        shell=True,
-        check=True,
-    )
 
 
 @task
 def patch(project):
@@ -35,7 +35,12 @@ classDiagram
 | name | name in context of build & ENV | - | |
 
 ## Example Usage
-### build.py
+
+### Example project
+
+A complete example project you can find on: https://repo.prod.meissa.de/meissa/buildtest
+
+### Example of a build.py
 
 ```python
 from os import environ
@@ -13,14 +13,17 @@ classDiagram
 
 ## Input
 
 | name | description | default |
-| ----------------------------- | ----------------------------------------------------------------- | --------- |
+| --------------------------------- | ----------------------------------------------------------------- | --------- |
 | k3s_provision_user | the user used to provision k3s | "root" |
 | k3s_letsencrypt_email | email address used for letsencrypt | |
 | k3s_letsencrypt_endpoint | letsencrypt endpoint. Valid values are staging, prod | "staging" |
 | k3s_app_filename_to_provision | an k8s manifest to apply imediately after k3s setup was sucessful | |
 | k3s_enable_echo | provision the echo app on k3s. Valid values are true, false | "false" |
 | k3s_provs_template | use a individual template for provs config | None |
+| k3s_enable_hetzner_csi | enable hetzner csi | False |
+| k3s_hetzner_api_token | hetzner_api_token | None |
+| k3s_hetzner_encryption_passphrase | encryption passphrase for volumes | None |
 
 ### Credentials Mapping defaults
 
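The three Hetzner-related rows added to the table above plug into the same `build.py` input dictionary as the existing k3s options. The fragment below is only an illustrative sketch: the `ProvsK3sBuild` class and the surrounding keys follow the pattern of the other documented ddadevops examples, and the values are placeholders rather than content of this change set.

```python
from os import environ
from pybuilder.core import init
from ddadevops import *

@init
def initialize(project):
    input = {
        # common keys (name, module, stage, project_root_path, build_types,
        # mixin_types, ...) omitted here; see the documented examples
        "k3s_provision_user": "root",
        "k3s_letsencrypt_email": "admin@example.org",
        "k3s_letsencrypt_endpoint": "staging",
        # new, optional Hetzner CSI support from this change set
        "k3s_enable_hetzner_csi": True,
        "k3s_hetzner_api_token": environ.get("HETZNER_API_TOKEN"),
        "k3s_hetzner_encryption_passphrase": environ.get("HETZNER_ENCRYPTION_PASSPHRASE"),
    }
    build = ProvsK3sBuild(project, input)
    build.initialize_build_dir()
```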
@@ -26,22 +26,23 @@ classDiagram
 ## Input
 
 | name | description | default |
-| ----------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------- |
+| ----------------------------- |-----------------------------------------------------------------------------------------------------------------------| --------------- |
 | release_type | one of MAJOR, MINOR, PATCH, NONE | "NONE" |
-| release_main_branch | the name of your trank | "main" |
+| release_main_branch | the name of your trunk | "main" |
 | release_primary_build_file | path to the build file having the leading version info (read & write). Valid extensions are .clj, .json, .gradle, .py | "./project.clj" |
 | release_secondary_build_files | list of secondary build files, version is written in. | [] |
 | release_artifact_server_url | Optional: The base url of your forgejo/gitea instance to publish a release tode | |
 | release_organisation | Optional: The repository organisation name | |
 | release_repository_name | Optional: The repository name name | |
 | release_artifacts | Optional: The list of artifacts to publish to the release generated name | [] |
+| release_tag_prefix | Optional: Prefix of tag | "" |
 
 ## Example Usage just for creating releases
 
 ### build.py
 
 ```python
-rom os import environ
+from os import environ
 from pybuilder.core import task, init
 from ddadevops import *
 
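The compare view cuts the ReleaseMixin example off right after its imports. To show where the newly documented `release_tag_prefix` input would sit, here is a minimal, hedged sketch of such a `build.py`; apart from `release_tag_prefix` (taken from the table above), the keys and the `ReleaseMixin` call merely follow the pattern of the other ddadevops examples and are not taken from this diff.

```python
from os import environ
from pybuilder.core import task, init
from ddadevops import *

@init
def initialize(project):
    input = {
        "name": "example-project",
        "module": "release",
        "stage": "notused",
        "project_root_path": ".",
        "build_types": [],
        "mixin_types": ["RELEASE"],
        "release_type": environ.get("RELEASE_TYPE", "NONE"),
        "release_main_branch": "main",
        "release_primary_build_file": "./project.clj",
        # new with this change set: tags are created as e.g. "v1.2.3" instead of "1.2.3"
        "release_tag_prefix": "v",
    }
    build = ReleaseMixin(project, input)
    build.initialize_build_dir()
```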
56  infrastructure/backup/build.py (new file)

@@ -0,0 +1,56 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
import logging

name = 'dda-backup'
MODULE = 'NOT_SET'
PROJECT_ROOT_PATH = '../..'
version = "4.12.2-dev"


@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_ONLY",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.7.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()


@task
def image(project):
    build = get_devops_build(project)
    build.image()


@task
def test(project):
    build = get_devops_build(project)
    build.test()


@task
def drun(project):
    build = get_devops_build(project)
    build.drun()


@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()
79  infrastructure/backup/doc/backup_dev_notes.md (new file)

@@ -0,0 +1,79 @@
## Init Statemachine

### Inputs
1. `restic-password: ""`
2. `restic-password-to-rotate: ""`

### Manual init of the restic repository for the first time

1. apply backup-and-restore pod:
   `kubectl scale deployment backup-restore --replicas=1`
2. exec into the pod and execute the init script (press tab to get your exact pod name):
   `kubectl exec -it backup-restore-... -- /usr/local/bin/init.sh`
3. remove backup-and-restore pod:
   `kubectl scale deployment backup-restore --replicas=0`

### Password Rotation

1. apply backup-and-restore pod:
   `kubectl scale deployment backup-restore --replicas=1`
2. add new password to restic repository:
   `restic key add ....`
   => Trigger ::
   field (1) credential current
   field (2) credential new
3. replace field (1) with (2) & clear (2)
4. remove old key - ???
   `restic remove ....`

```mermaid
stateDiagram-v2
    [*] --> init
    init --> backup_ready: trigger, restic-password !empty
    backup_ready --> new_password_added: restic-password !empty && restic-password-to-rotate !empty
    new_password_added --> backup_ready: restic-password !empty && restic-password-to-rotate empty
```

### First Steps

1. Bring up the cloud test server
2. Deploy backup-restore there (empty secret possible?), new secret "rotation-credential-secret" as data
3. Mount the created secret into the backup-restore pod
4. Start the babashka script in the pod -> reads the secret, possibly empty
5. Micha cons.

```mermaid
sequenceDiagram
    participant k8s
    participant e as entrypoint.sh
    participant rm as restic-management.clj

    k8s ->> e: cronjob calls
    e ->> rm: start-file
    rm ->> rm: rotate
    activate rm
    rm ->> rm: read-backup-repository-state (state)
    rm ->> rm: read-secret (backup-secret/restic-password, rotation-credential-secret/rotation-credential)
    rm ->> rm: switch
    activate rm
    rm ->> rm: if init && restic-password != null
    activate rm
    rm ->> rm: init.sh
    rm ->> rm: state init -> backup-ready
    deactivate rm
    rm ->> rm: if backup-ready && rotation-credential != null
    activate rm
    rm ->> rm: add-new-password-to-restic-repository.sh
    rm ->> rm: state backup-ready -> new-password-added
    deactivate rm
    rm ->> rm: if new-password-added && rotation-credential == null
    activate rm
    rm ->> rm: remove-old-password-from-restic-repository.sh
    rm ->> rm: state new-password-added -> backup-ready
    deactivate rm
    deactivate rm

    rm ->> rm: store-repository-state (state)
    deactivate rm
```
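The state diagram in backup_dev_notes.md above describes three states driven by which of the two secrets is present. As a purely illustrative aid (not part of the change set, which implements this in restic_management.clj and the shell helpers), the same transition logic can be sketched in a few lines of Python:

```python
def next_state(state: str, restic_password: str, rotation_credential: str) -> str:
    """Transition function for the backup-repository state machine described above."""
    if state == "init" and restic_password:
        # first trigger: the restic repository gets initialised (init.sh)
        return "backup-ready"
    if state == "backup-ready" and restic_password and rotation_credential:
        # a rotation credential appeared: add it as a new key to the repository
        return "new-password-added"
    if state == "new-password-added" and not rotation_credential:
        # rotation credential was cleared: remove the old key, back to normal
        return "backup-ready"
    return state


assert next_state("init", "old-pw", "") == "backup-ready"
assert next_state("backup-ready", "old-pw", "new-pw") == "new-password-added"
assert next_state("new-password-added", "new-pw", "") == "backup-ready"
```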
5  infrastructure/backup/image/Dockerfile (new file)

@@ -0,0 +1,5 @@
FROM ubuntu:jammy

# install it
ADD resources /tmp/
RUN /tmp/install.sh
70  infrastructure/backup/image/resources/file-functions.sh (new file)

@@ -0,0 +1,70 @@
backup_file_path='files'

function init-file-repo() {
  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init
  else
    restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init --cacert ${CERTIFICATE_FILE}
  fi
}

# First arg is the directory, second is optional for the path to a certificate file
function backup-directory() {
  local directory="$1"; shift

  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
    cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup .
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
  else
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
    cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup . --cacert ${CERTIFICATE_FILE}
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
  fi
}

# First arg is the directory, the remaining args are the sub-directories (relative to the first directory) to backup.
function backup-fs-from-directory() {
  local directory="$1"; shift

  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
    cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
  else
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
    cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@ --cacert ${CERTIFICATE_FILE}
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
  fi
}

# This does not work like this!
function restore-directory() {
  local directory="$1"; shift
  local snapshot_id="${1:-latest}"; shift

  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
    rm -rf ${directory}*
    restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory}
  else
    restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
    rm -rf ${directory}*
    restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory} --cacert ${CERTIFICATE_FILE}
  fi
}

function list-snapshot-files() {
  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots
  else
    restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots --cacert ${CERTIFICATE_FILE}
  fi
}
21  infrastructure/backup/image/resources/functions.sh (new file)

@@ -0,0 +1,21 @@
# usage: file_env VAR [DEFAULT]
# ie: file_env 'XYZ_DB_PASSWORD' 'example'
# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
# "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
function file_env() {
  local var="$1"
  local fileVar="${var}_FILE"
  local def="${2:-}"
  if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
    echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
    exit 1
  fi
  local val="$def"
  if [ "${!var:-}" ]; then
    val="${!var}"
  elif [ "${!fileVar:-}" ]; then
    val="$(< "${!fileVar}")"
  fi
  export "$var"="$val"
  unset "$fileVar"
}
36  infrastructure/backup/image/resources/install.sh (new executable file)

@@ -0,0 +1,36 @@
#!/bin/bash

set -exo pipefail

function babashka_install() {
  babashka_version="1.3.189"
  curl -SsLo /tmp/babashka-${babashka_version}-linux-amd64.tar.gz https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz
  curl -SsLo /tmp/checksum https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz.sha256
  echo " /tmp/babashka-$babashka_version-linux-amd64.tar.gz"|tee -a /tmp/checksum
  sha256sum -c --status /tmp/checksum
  tar -C /tmp -xzf /tmp/babashka-${babashka_version}-linux-amd64.tar.gz
  install -m 0700 -o root -g root /tmp/bb /usr/local/bin/
}

function main() {
  {
    upgradeSystem
    apt-get install -qqy ca-certificates curl gnupg postgresql-client-14 restic
    curl -Ss --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg
    sh -c 'echo "deb [signed-by=/etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg] https://apt.postgresql.org/pub/repos/apt jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
    upgradeSystem
    babashka_install
  } > /dev/null

  update-ca-certificates

  install -m 0400 /tmp/functions.sh /usr/local/lib/
  install -m 0400 /tmp/pg-functions.sh /usr/local/lib/
  install -m 0400 /tmp/file-functions.sh /usr/local/lib/
  install -m 0740 /tmp/restic_management.clj /usr/local/bin/

  cleanupDocker
}

source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
149  infrastructure/backup/image/resources/pg-functions.sh (new file)

@@ -0,0 +1,149 @@
backup_pg_role_path='pg-role'
backup_pg_database_path='pg-database'

function init-command() {
  restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} -v init $@
}

function init-role-repo() {

  if [ -z ${CERTIFICATE_FILE} ];
  then
    init-command
  else
    init-command --cacert ${CERTIFICATE_FILE}
  fi

}

function init-database-command() {
  restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} -v init $@
}

function init-database-repo() {

  if [ -z ${CERTIFICATE_FILE} ];
  then
    init-database-command
  else
    init-database-command --cacert ${CERTIFICATE_FILE}
  fi
}

function drop-create-db() {
  psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
    --no-password -c "DROP DATABASE \"${POSTGRES_DB}\";"
  psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
    --no-password -c "CREATE DATABASE \"${POSTGRES_DB}\";"
}

function create-pg-pass() {
  local pg_host=${POSTGRES_HOST:-localhost}

  echo "${pg_host}:${POSTGRES_DB}:${POSTGRES_USER}:${POSTGRES_PASSWORD}" > /root/.pgpass
  echo "${POSTGRES_HOST}:template1:${POSTGRES_USER}:${POSTGRES_PASSWORD}" >> /root/.pgpass
  chmod 0600 /root/.pgpass
}

function roles-unlock-command() {
  restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} unlock --cleanup-cache $@
}

function roles-forget-command() {
  restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}

function backup-roles() {
  local role_prefix="$1"; shift

  if [ -z ${CERTIFICATE_FILE} ];
  then
    roles-unlock-command
    pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
      grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin
    roles-forget-command
  else
    roles-unlock-command --cacert ${CERTIFICATE_FILE}
    pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
      grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin --cacert ${CERTIFICATE_FILE}
    roles-forget-command --cacert ${CERTIFICATE_FILE}
  fi
}

function db-unlock-command() {
  restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} unlock --cleanup-cache $@
}

function db-forget-command() {
  restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}

function backup-db-dump() {

  if [ -z ${CERTIFICATE_FILE} ];
  then
    db-unlock-command
    pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
      -U ${POSTGRES_USER} --no-password --serializable-deferrable | \
      restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin
    db-forget-command
  else
    db-unlock-command --cacert ${CERTIFICATE_FILE}
    pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
      -U ${POSTGRES_USER} --no-password --serializable-deferrable | \
      restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin --cacert ${CERTIFICATE_FILE}
    db-forget-command --cacert ${CERTIFICATE_FILE}
  fi
}

function restore-roles() {
  local snapshot_id="${1:-latest}"; shift

  if [ -z ${CERTIFICATE_FILE} ];
  then
    roles-unlock-command
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin | \
      psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
      --no-password
  else
    roles-unlock-command --cacert ${CERTIFICATE_FILE}
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
      psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
      --no-password
  fi
}

function restore-db() {
  local snapshot_id="${1:-latest}"; shift

  if [ -z ${CERTIFICATE_FILE} ];
  then
    db-unlock-command
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin | \
      psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
      --no-password
  else
    db-unlock-command --cacert ${CERTIFICATE_FILE}
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
      psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
      --no-password
  fi
}

function list-snapshot-roles() {
  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots
  else
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots --cacert ${CERTIFICATE_FILE}
  fi
}

function list-snapshot-db() {
  if [ -z ${CERTIFICATE_FILE} ];
  then
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots
  else
    restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots --cacert ${CERTIFICATE_FILE}
  fi
}
51  infrastructure/backup/image/resources/restic_management.clj (new executable file)

@@ -0,0 +1,51 @@
#! /usr/bin/env bb

(ns restic-management
  (:require
   [clojure.spec.alpha :as s]
   [clojure.java.io :as io]
   [clojure.edn :as edn]))

(s/def ::state string?)

(s/def ::backup-repository-state
  (s/keys :req-un [::state]))

(def state {:state ""})

(defn store-backup-repository-state [s]
  (spit "backup-repository-state.edn" s))

(defn read-backup-repository-state []
  (try
    (with-open [r (io/reader "backup-repository-state.edn")]
      (edn/read (java.io.PushbackReader. r)))

    (catch java.io.IOException e
      (printf "Couldn't open '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))
    (catch RuntimeException e
      (printf "Error parsing edn file '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))))

(defn read-secret [s]
  (slurp (str "/var/run/secrets/" s)))
  ;"/var/run/secrets/rotation-credential-secret/rotation-credential"))

;(println (read-backup-repository-state))

;(println (:state (read-backup-repository-state)))

;(println (s/valid? ::backup-repository-state (read-backup-repository-state)))

(println (read-secret "rotation-credential-secret/rotation-credential"))
(println (read-secret "backup-secrets/restic-password"))

(s/def ::new-password string?)
(s/def ::old-password string?)
(s/def ::password-state
  (s/keys :req-un [::new-password ::old-password]))

(defn rotate []
  (let [state {:new-password (read-secret "rotation-credential-secret/rotation-credential")
               :old-password (read-secret "backup-secrets/restic-password")}]
    (store-backup-repository-state (prn-str state))))

(rotate)
7  infrastructure/backup/test/Dockerfile (new file)

@@ -0,0 +1,7 @@
FROM dda-backup:latest

# install it
RUN apt update && apt install -qqy openjdk-17-jre-headless
ADD resources /tmp/
RUN rm -rf /root/.m2
RUN /tmp/install-test.bb
4  infrastructure/backup/test/resources/bb.edn (new file)

@@ -0,0 +1,4 @@
{:deps {org.clojure/spec.alpha {:mvn/version "0.4.233"}
        orchestra/orchestra {:mvn/version "2021.01.01-1"}
        org.domaindrivenarchitecture/dda-backup {:mvn/version "0.1.1-SNAPSHOT"}}}
32  infrastructure/backup/test/resources/install-test.bb (new executable file)

@@ -0,0 +1,32 @@
#!/usr/bin/env bb

(require '[babashka.tasks :as tasks])

(defn curl-and-check!
  [filename artifact-url sha256-url]
  (let [filepath (str "/tmp/" filename)]
    (tasks/shell "curl" "-SsLo" filepath artifact-url)
    (tasks/shell "curl" "-SsLo" "/tmp/checksum" sha256-url)
    (tasks/shell "bash" "-c" (str "echo \" " filepath "\"|tee -a /tmp/checksum"))
    ;(tasks/shell "sha256sum" "-c" "--status" "/tmp/checksum")
    ))

(defn tar-install!
  [filename binname]
  (let [filepath (str "/tmp/" filename)]
    (tasks/shell "tar" "-C" "/tmp" "-xzf" filepath)
    (tasks/shell "install" "-m" "0700" "-o" "root" "-g" "root" (str "/tmp/" binname) "/usr/local/bin/")))

(defn install!
  [filename]
  (tasks/shell "install" "-m" "0700" "-o" "root" "-g" "root" (str "/tmp/" filename) "/usr/local/bin/"))

(tasks/shell "bb" "/tmp/test.bb")
(curl-and-check!
 "provs-syspec.jar"
 "https://repo.prod.meissa.de/attachments/0a1da41e-aa5b-4a3e-a3b1-215cf2d5b021"
 "https://repo.prod.meissa.de/attachments/f227cf65-cb0f-46a7-a6cd-28f46917412a")
(install! "provs-syspec.jar")
(tasks/shell "apt" "update")
(tasks/shell "apt" "install" "-qqy" "openjdk-17-jre-headless")
(tasks/shell "java" "-jar" "/usr/local/bin/provs-syspec.jar" "local" "-c" "/tmp/spec.yml" )
7  infrastructure/backup/test/resources/spec.yml (new file)

@@ -0,0 +1,7 @@
package:
  - name: "restic"

command:
  - command: "bb -h"
  - command: "/tmp/test.bb"
27  infrastructure/backup/test/resources/test.bb (new executable file)

@@ -0,0 +1,27 @@
#!/usr/bin/env bb

(require '[babashka.tasks :as tasks]
         '[dda.backup.management :as mgm])

(defn restic-repo-init!
  []
  (spit "restic-pwd" "ThePassword")
  (mgm/init! {:password-file "restic-pwd"
              :restic-repository "restic-repo"}))

(defn restic-backup!
  []
  (tasks/shell "mkdir" "test-backup")
  (spit "test-backup/file" "I was here")
  (tasks/shell "restic" "backup" "--password-file" "restic-pwd" "--repo" "restic-repo" "test-backup"))

(defn restic-restore!
  []
  (tasks/shell "mkdir" "test-restore")
  (tasks/shell "restic" "restore" "--password-file" "restic-pwd" "--repo" "restic-repo" "--target" "test-restore" "latest")
  )


(restic-repo-init!)
(restic-backup!)
(restic-restore!)
@@ -6,7 +6,7 @@ from ddadevops import *
 name = "ddadevops"
 MODULE = "clj-cljs"
 PROJECT_ROOT_PATH = "../.."
-version = "4.7.4"
+version = "4.13.2-dev"
 
 @init
 def initialize(project):
@@ -1 +0,0 @@
-478604fe85c711aafe8ef78c0bf25cb93fa46de5a3c07040f25a595096c43f8a kubeconform-v0.6.3.tar.gz
@@ -1,32 +1,45 @@
 #!/bin/bash
-set -eux
+set -exo pipefail
 
 function main() {
+  {
     upgradeSystem
+
     mkdir -p /usr/share/man/man1
-    apt -qqy install openjdk-17-jre-headless leiningen curl
+    apt-get -qqy install curl openjdk-17-jre-headless leiningen
+
     # shadow-cljs
+    npm install -g npm
     npm install -g --save-dev shadow-cljs
+
     # download kubeconform & graalvm
-    curl -Lo /tmp/kubeconform-v0.6.3.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.6.3/kubeconform-linux-amd64.tar.gz
+    kubeconform_version="0.6.4"
 
-    # checksum
-    cd /tmp
-    sha256sum --check CHECKSUMS
+    curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
+    curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
+
+    # checksum kubeconform
+    checksum
 
     # install kubeconform
-    tar -xf /tmp/kubeconform-v0.6.3.tar.gz
-    cp kubeconform /usr/local/bin
+    tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
 
     #install pyb
-    apt -qqy install python3 python3-pip git;
-    pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages;
+    apt-get -qqy install python3 python3-pip git
+    pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
+
+    #check
+    lein --help
 
     cleanupDocker
+  } > /dev/null
 }
 
-source /tmp/install_functions.sh
-main
+function checksum() {
+  awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
+  cat /tmp/kubeconform-checksum
+  sha256sum -c --status /tmp/kubeconform-checksum
+}
+
+source /tmp/install_functions_debian.sh
+DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
@@ -6,7 +6,7 @@ from ddadevops import *
 name = "ddadevops"
 MODULE = "clj"
 PROJECT_ROOT_PATH = "../.."
-version = "4.7.4"
+version = "4.13.2-dev"
 
 @init
 def initialize(project):
@@ -3,4 +3,4 @@ FROM debian:stable-slim
 ADD resources /tmp
 RUN /tmp/install.sh
 ENV LANG=en_US.UTF-8 \
-    JAVA_HOME=/usr/lib64/graalvm/graalvm-community-java17
+    JAVA_HOME=/usr/lib/jvm/graalvm
@@ -1,2 +0,0 @@
-478604fe85c711aafe8ef78c0bf25cb93fa46de5a3c07040f25a595096c43f8a kubeconform-v0.6.3.tar.gz
-094e5a7dcc4a903b70741d5c3c1688f83e83e2d44eb3d8d798c5d79ed902032c graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz
@@ -1,38 +1,57 @@
 #!/bin/bash
-set -eux
+set -exo pipefail
 
 function main() {
+  {
     upgradeSystem
-    apt -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev;
+
+    apt-get -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev
 
     # download kubeconform & graalvm
-    curl -Lo /tmp/kubeconform-v0.6.3.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.6.3/kubeconform-linux-amd64.tar.gz
-    curl -Lo /tmp/graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-17.0.7/graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz
+    kubeconform_version="0.6.4"
+    graalvm_jdk_version="21.0.2"
 
-    # checksum
-    cd /tmp
-    sha256sum --check CHECKSUMS
+    curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
+    curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
+    curl -SsLo /tmp/graalvm-community-jdk.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz
+    curl -SsLo /tmp/graalvm-checksum https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz.sha256
+
+    # checksum kubeconform & graalvm-jdk
+    checksum
 
     # install kubeconform
-    tar -xf /tmp/kubeconform-v0.6.3.tar.gz
-    cp kubeconform /usr/local/bin
+    tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
 
     # install graalvm
-    tar -xzf graalvm-community-jdk-17.0.7_linux-x64_bin.tar.gz
-    mv graalvm-community-openjdk-17.0.7+7.1 /usr/lib/jvm/
-    ln -s /usr/lib/jvm/graalvm-community-openjdk-17.0.7+7.1 /usr/lib/jvm/graalvm
+    tar -C /usr/lib/jvm/ -xf /tmp/graalvm-community-jdk.tar.gz
+    dirname_graalvm=$(ls /usr/lib/jvm/|grep -e graa)
+    ln -s /usr/lib/jvm/$dirname_graalvm /usr/lib/jvm/graalvm
     ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
     update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
-    gu install native-image
     ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
 
     #install pyb
-    apt -qqy install python3 python3-pip;
-    pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages;
+    apt-get -qqy install python3 python3-pip
+    pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
+
+    #check
+    native-image --version
+    lein -v
 
     cleanupDocker
+  } > /dev/null
 }
 
-source /tmp/install_functions.sh
-main
+function checksum() {
+  #kubeconform
+  awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
+  sha256sum -c --status /tmp/kubeconform-checksum
+
+  #graalvm
+  echo " /tmp/graalvm-community-jdk.tar.gz"|tee -a /tmp/graalvm-checksum
+  sha256sum -c --status /tmp/graalvm-checksum
+}
+
+source /tmp/install_functions_debian.sh
+DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
@@ -6,7 +6,7 @@ from ddadevops import *
 name = "ddadevops"
 MODULE = "ddadevops"
 PROJECT_ROOT_PATH = "../.."
-version = "4.7.4"
+version = "4.13.2-dev"
 
 
 @init

@@ -26,7 +26,7 @@ def initialize(project):
         "image_tag": f"{image_tag}",
     }
 
-    project.build_depends_on("ddadevops>=4.0.0")
+    project.build_depends_on("ddadevops>=4.9.0")
 
     build = DevopsImageBuild(project, input)
     build.initialize_build_dir()
@@ -1,6 +1,5 @@
 FROM python:3.10-alpine
 
-RUN set -eux;
-RUN apk add --no-cache python3 py3-pip openssl-dev bash git curl;
-RUN python3 -m pip install -U pip;
-RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection;
+ADD resources /tmp
+RUN /tmp/install.sh
19  infrastructure/ddadevops/image/resources/install.sh (new executable file)

@@ -0,0 +1,19 @@
#!/bin/sh

set -exo pipefail

function main() {
  {
    upgradeSystem

    apk add --no-cache python3 py3-pip openssl-dev bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection

    cleanupDocker

  } > /dev/null
}

source /tmp/install_functions_alpine.sh
main
@@ -6,7 +6,7 @@ from ddadevops import *
 name = "ddadevops"
 MODULE = "dind"
 PROJECT_ROOT_PATH = "../.."
-version = "4.7.4"
+version = "4.13.2-dev"
 
 
 @init

@@ -26,7 +26,7 @@ def initialize(project):
         "image_tag": f"{image_tag}",
     }
 
-    project.build_depends_on("ddadevops>=4.0.0")
+    project.build_depends_on("ddadevops>=4.7.0")
 
     build = DevopsImageBuild(project, input)
     build.initialize_build_dir()
@@ -1,6 +1,5 @@
 FROM docker:latest
 
-RUN set -eux;
-RUN apk add --no-cache python3 py3-pip openssl-dev bash git;
-RUN python3 -m pip install -U pip;
-RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection;
+WORKDIR /tmp
+ADD resources ./
+RUN ./install.sh
17  infrastructure/dind/image/resources/install.sh (new executable file)

@@ -0,0 +1,17 @@
#!/bin/sh

set -exo pipefail

function main() {
  {
    upgradeSystem

    apk add --no-cache python3 py3-pip openssl-dev bash git
    pip3 install --break-system-packages pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection

    cleanupDocker
  } > /dev/null
}

source /tmp/install_functions_alpine.sh
main
57  infrastructure/kotlin/build.py (new file)

@@ -0,0 +1,57 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

name = "ddadevops"
MODULE = "kotlin"
PROJECT_ROOT_PATH = "../.."
version = "4.13.2-dev"


@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.0.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()


@task
def image(project):
    build = get_devops_build(project)
    build.image()


@task
def drun(project):
    build = get_devops_build(project)
    build.drun()


@task
def test(project):
    build = get_devops_build(project)
    build.test()


@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()
4  infrastructure/kotlin/image/Dockerfile (new file)

@@ -0,0 +1,4 @@
FROM debian:stable-slim

ADD resources /tmp
RUN /tmp/install.sh
17  infrastructure/kotlin/image/resources/install.sh (new executable file)

@@ -0,0 +1,17 @@
#!/bin/bash
set -exo pipefail

function main() {
  {
    upgradeSystem

    apt-get -qqy install curl git kotlin gradle iputils-ping ssh python3 python3-pip

    pip3 install --break-system-packages pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection

    cleanupDocker
  } > /dev/null
}

source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
@@ -6,7 +6,7 @@ from ddadevops import *
 name = "ddadevops"
 MODULE = "python"
 PROJECT_ROOT_PATH = "../.."
-version = "4.7.4"
+version = "4.13.2-dev"
 
 
 @init
@@ -1,7 +1,4 @@
 FROM python:3.10-alpine
 
-RUN set -eux;
-RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl;
-RUN python3 -m pip install -U pip;
-RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection;
-RUN pip3 install coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML;
+ADD resources /tmp
+RUN /tmp/install.sh
20  infrastructure/python/image/resources/install.sh (new executable file)

@@ -0,0 +1,20 @@
#!/bin/sh

set -exo pipefail

function main() {
  {
    upgradeSystem

    apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection \
      coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML

    cleanupDocker

  } > /dev/null
}

source /tmp/install_functions_alpine.sh
main
@@ -31,6 +31,12 @@ class ImageBuildService:
         self.__copy_build_resource_file_from_package__(
             "image/resources/install_functions.sh", devops
         )
+        self.__copy_build_resource_file_from_package__(
+            "image/resources/install_functions_debian.sh", devops
+        )
+        self.__copy_build_resource_file_from_package__(
+            "image/resources/install_functions_alpine.sh", devops
+        )
 
     def __copy_build_resources_from_dir__(self, devops: Devops):
         image = devops.specialized_builds[BuildType.IMAGE]

@@ -45,7 +51,10 @@ class ImageBuildService:
         else:
             self.__copy_build_resources_from_dir__(devops)
         self.file_api.cp_recursive("image", build_path)
-        self.file_api.cp_recursive("test", build_path)
+        try:
+            self.file_api.cp_recursive("test", build_path)
+        except:
+            print("Folder 'test' not found")
 
     def image(self, devops: Devops):
         image = devops.specialized_builds[BuildType.IMAGE]
@@ -53,7 +53,8 @@ class ReleaseService:
         bump_version = release_version.create_bump()
         release_message = f"release: {release_version.to_string()}"
         bump_message = f"bump version to: {bump_version.to_string()}"
-        self.git_api.tag_annotated(release_version.to_string(), release_message, 0)
+        release_tag = f"{release.release_tag_prefix}{release_version.to_string()}"
+        self.git_api.tag_annotated(release_tag, release_message, 0)
         self.__set_version_and_commit__(
             bump_version,
             release.build_files(),
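With this change the annotated tag is composed from a configurable prefix plus the version. A minimal sketch of the composition, with assumed values; the prefix itself is read from the Release config shown further below and defaults to an empty string:

# Sketch with assumed values; release_tag_prefix comes from the Release config.
release_tag_prefix = "v"
release_version_str = "4.13.1"
release_tag = f"{release_tag_prefix}{release_version_str}"
print(release_tag)  # v4.13.1 -- with the default "" prefix the tag stays "4.13.1"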
@@ -11,6 +11,7 @@ class BuildFileType(Enum):
     JS = ".json"
     JAVA_GRADLE = ".gradle"
     JAVA_CLOJURE = ".clj"
+    JAVA_CLOJURE_EDN = ".edn"
     PYTHON = ".py"

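For illustration, a small sketch of how a file suffix maps onto the extended enum. The class is renamed with a Sketch suffix and the lookup helper is hypothetical; it only mirrors what BuildFile.build_file_type() derives from the path suffix:

from enum import Enum
from pathlib import Path

class BuildFileTypeSketch(Enum):
    JS = ".json"
    JAVA_GRADLE = ".gradle"
    JAVA_CLOJURE = ".clj"
    JAVA_CLOJURE_EDN = ".edn"
    PYTHON = ".py"

def type_for(path: str):
    # Hypothetical helper: resolve the enum member whose value equals the file suffix.
    suffix = Path(path).suffix
    return next((t for t in BuildFileTypeSketch if t.value == suffix), None)

print(type_for("deps.edn"))   # BuildFileTypeSketch.JAVA_CLOJURE_EDN
print(type_for("build.py"))   # BuildFileTypeSketch.PYTHON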
@@ -41,40 +42,40 @@ class BuildFile(Validateable):
                 result = BuildFileType.JAVA_CLOJURE
             case ".py":
                 result = BuildFileType.PYTHON
+            case ".edn":
+                result = BuildFileType.JAVA_CLOJURE_EDN
             case _:
                 result = None
         return result

+    def __get_file_type_regex_str(self, file_type: BuildFileType):
+        match file_type:
+            case BuildFileType.JAVA_GRADLE:
+                return r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
+            case BuildFileType.PYTHON:
+                return r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT|-dev\d*)?)\""
+            case BuildFileType.JAVA_CLOJURE:
+                return r"(?P<pre_version>\(defproject\s(\S)*\s)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
+            case BuildFileType.JAVA_CLOJURE_EDN:
+                return r"(?P<pre_version>\:version\s+)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
+            case _:
+                return ""
+
     def get_version(self) -> Version:
         try:
-            match self.build_file_type():
+            build_file_type = self.build_file_type()
+            match build_file_type:
                 case BuildFileType.JS:
                     version_str = json.loads(self.content)["version"]
-                case BuildFileType.JAVA_GRADLE:
-                    # TODO: '\nversion = ' will not parse all ?!
-                    version_line = re.search("\nversion = .*", self.content)
-                    version_line_group = version_line.group()
-                    version_string = re.search(
-                        "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
-                    )
-                    version_str = version_string.group()
-                case BuildFileType.PYTHON:
-                    # TODO: '\nversion = ' will not parse all ?!
-                    version_line = re.search("\nversion = .*\n", self.content)
-                    version_line_group = version_line.group()
-                    version_string = re.search(
-                        "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*",
-                        version_line_group,
-                    )
-                    version_str = version_string.group()
-                case BuildFileType.JAVA_CLOJURE:
-                    # TODO: unsure about the trailing '\n' !
-                    version_line = re.search("\\(defproject .*\n", self.content)
-                    version_line_group = version_line.group()
-                    version_string = re.search(
-                        "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
-                    )
-                    version_str = version_string.group()
+                case (
+                    BuildFileType.JAVA_GRADLE
+                    | BuildFileType.PYTHON
+                    | BuildFileType.JAVA_CLOJURE
+                    | BuildFileType.JAVA_CLOJURE_EDN
+                ):
+                    version_str = re.search(
+                        self.__get_file_type_regex_str(build_file_type), self.content
+                    ).group("version")
         except:
             raise RuntimeError(f"Version not found in file {self.file_path}")
@@ -84,32 +85,26 @@ class BuildFile(Validateable):
         return result

     def set_version(self, new_version: Version):
-        # TODO: How can we create regex-pattern constants to use them at both places?
+        if new_version.is_snapshot():
+            new_version.snapshot_suffix = self.get_default_suffix()
+
         try:
-            match self.build_file_type():
+            build_file_type = self.build_file_type()
+            match build_file_type:
                 case BuildFileType.JS:
                     json_data = json.loads(self.content)
                     json_data["version"] = new_version.to_string()
                     self.content = json.dumps(json_data, indent=4)
-                case BuildFileType.JAVA_GRADLE:
-                    substitute = re.sub(
-                        '\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
-                        f'\nversion = "{new_version.to_string()}"',
-                        self.content,
-                    )
-                    self.content = substitute
-                case BuildFileType.PYTHON:
-                    substitute = re.sub(
-                        '\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*"',
-                        f'\nversion = "{new_version.to_string()}"',
-                        self.content,
-                    )
-                    self.content = substitute
-                case BuildFileType.JAVA_CLOJURE:
-                    # TODO: we should stick here on defproject instead of first line!
-                    substitute = re.sub(
-                        '"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
-                        f'"{new_version.to_string()}"',
+                case (
+                    BuildFileType.JAVA_GRADLE
+                    | BuildFileType.PYTHON
+                    | BuildFileType.JAVA_CLOJURE
+                    | BuildFileType.JAVA_CLOJURE_EDN
+                ):
+                    substitute = re.sub(
+                        self.__get_file_type_regex_str(build_file_type),
+                        rf'\g<pre_version>"{new_version.to_string()}"',
                         self.content,
                         1,
                     )
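The two hunks above replace the per-type ad-hoc parsing with one named-group regex per build-file type: the pre_version group captures everything up to the version literal and the version group captures the version itself, so get_version() can read .group("version") and set_version() can re-insert the prefix via \g<pre_version>. A minimal sketch of that mechanism using the Gradle pattern quoted above; the constant name and sample content are illustrative, not from the repository:

import re

GRADLE_PATTERN = r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
content = 'kotlin_version = "3.3.3"\nversion = "1.1.5-SNAPSHOT"\n'

# get_version(): \bversion skips identifiers like kotlin_version, the named
# group isolates the semver string.
print(re.search(GRADLE_PATTERN, content).group("version"))     # 1.1.5-SNAPSHOT

# set_version(): \g<pre_version> keeps the 'version = ' prefix and swaps only the number.
print(re.sub(GRADLE_PATTERN, r'\g<pre_version>"2.0.0"', content, 1))
# kotlin_version = "3.3.3"
# version = "2.0.0"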
@@ -78,6 +78,12 @@ class DnsRecord(Validateable):
             result.append("ipv4 & ipv6 may not both be empty.")
         return result

+    def ip(self) -> str:
+        if (self.ipv4):
+            return self.ipv4
+        else:
+            return self.ipv6
+

 class Devops(Validateable):
     def __init__(
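The new ip() accessor prefers the IPv4 address and falls back to IPv6, which the provs command below relies on. A self-contained sketch of just that fallback; the class name carries a Sketch suffix to mark it as an illustration, the field names follow DnsRecord:

class DnsRecordSketch:
    """Illustration only: mirrors the ip() fallback added to DnsRecord above."""

    def __init__(self, fqdn, ipv4=None, ipv6=None):
        self.fqdn = fqdn
        self.ipv4 = ipv4
        self.ipv6 = ipv6

    def ip(self):
        # Prefer IPv4, otherwise fall back to IPv6.
        if self.ipv4:
            return self.ipv4
        else:
            return self.ipv6


print(DnsRecordSketch("example.org", ipv6="::1").ip())  # ::1 -> k3s_provision_user@::1 in the provs call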
@@ -20,6 +20,14 @@ CONFIG_CERTMANAGER = """certmanager:
 """
 CONFIG_ECHO = """echo: $echo
 """
+CONFIG_HETZNER_CSI = """hetzner:
+  hcloudApiToken:
+    source: "PLAIN" # PLAIN, GOPASS or PROMPT
+    parameter: $hcloud_api # the api key for the hetzner cloud
+  encryptionPassphrase:
+    source: "PLAIN" # PLAIN, GOPASS or PROMPT
+    parameter: $encryption # the encryption passphrase for created volumes
+"""


 class K3s(Validateable):
@@ -28,8 +36,11 @@ class K3s(Validateable):
         self.k3s_letsencrypt_email = inp.get("k3s_letsencrypt_email")
         self.k3s_letsencrypt_endpoint = inp.get("k3s_letsencrypt_endpoint", "staging")
         self.k3s_app_filename_to_provision = inp.get("k3s_app_filename_to_provision")
-        self.k3s_enable_echo = inp.get("k3s_enable_echo", "false")
+        self.k3s_enable_echo = inp.get("k3s_enable_echo", None)
         self.k3s_provs_template = inp.get("k3s_provs_template", None)
+        self.k3s_enable_hetzner_csi = inp.get("k3s_enable_hetzner_csi", False)
+        self.k3s_hetzner_api_token = inp.get("k3s_hetzner_api_token", None)
+        self.k3s_hetzner_encryption_passphrase = inp.get("k3s_hetzner_encryption_passphrase", None)
         self.provision_dns: Optional[DnsRecord] = None

     def validate(self) -> List[str]:
@@ -37,6 +48,9 @@ class K3s(Validateable):
         result += self.__validate_is_not_empty__("k3s_letsencrypt_email")
         result += self.__validate_is_not_empty__("k3s_letsencrypt_endpoint")
         result += self.__validate_is_not_empty__("k3s_app_filename_to_provision")
+        if self.k3s_enable_hetzner_csi:
+            result += self.__validate_is_not_empty__("k3s_hetzner_api_token")
+            result += self.__validate_is_not_empty__("k3s_hetzner_encryption_passphrase")
         if self.provision_dns:
             result += self.provision_dns.validate()
         return result
@@ -61,6 +75,9 @@ class K3s(Validateable):
         substitutes["letsencrypt_endpoint"] = self.k3s_letsencrypt_endpoint
         if self.k3s_enable_echo is not None:
             substitutes["echo"] = self.k3s_enable_echo
+        if self.k3s_enable_hetzner_csi:
+            substitutes["hcloud_api"] = self.k3s_hetzner_api_token
+            substitutes["encryption"] = self.k3s_hetzner_encryption_passphrase
         return self.__config_template__().substitute(substitutes)

     def command(self, devops: Devops):
@@ -69,7 +86,7 @@ class K3s(Validateable):
         cmd = [
             "provs-server.jar",
             "k3s",
-            f"{self.k3s_provision_user}@{self.provision_dns.fqdn}",
+            f"{self.k3s_provision_user}@{self.provision_dns.ip()}",
             "-c",
             f"{devops.build_path()}/out_k3sServerConfig.yaml",
             "-a",
@@ -89,4 +106,6 @@ class K3s(Validateable):
             template_text += CONFIG_IPV4
         if self.provision_dns.ipv6 is not None:
             template_text += CONFIG_IPV6
+        if self.k3s_enable_hetzner_csi:
+            template_text += CONFIG_HETZNER_CSI
         return Template(template_text)
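When k3s_enable_hetzner_csi is set, the hetzner fragment is appended to the config template and its $hcloud_api / $encryption placeholders are filled from the new K3s fields. A standalone sketch of that substitution step; the token and passphrase values are made up:

from string import Template

# Same fragment as CONFIG_HETZNER_CSI above, quoted here only for the sketch.
CONFIG_HETZNER_CSI = """hetzner:
  hcloudApiToken:
    source: "PLAIN" # PLAIN, GOPASS or PROMPT
    parameter: $hcloud_api # the api key for the hetzner cloud
  encryptionPassphrase:
    source: "PLAIN" # PLAIN, GOPASS or PROMPT
    parameter: $encryption # the encryption passphrase for created volumes
"""

substitutes = {"hcloud_api": "dummy-token", "encryption": "dummy-passphrase"}
print(Template(CONFIG_HETZNER_CSI).substitute(substitutes))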
@@ -24,6 +24,7 @@ class Release(Validateable):
             "release_secondary_build_files", []
         )
         self.version = version
+        self.release_tag_prefix = inp.get("release_tag_prefix", "")
         self.release_artifact_server_url = inp.get("release_artifact_server_url")
         self.release_organisation = inp.get("release_organisation")
         self.release_repository_name = inp.get("release_repository_name")
@@ -58,10 +58,8 @@ class ImageApi:
         )

     def drun(self, name: str):
-        run(
-            f'docker run -it --entrypoint="" {name} /bin/bash',
-            shell=True,
-            check=True,
+        self.execution_api.execute_live(
+            f'docker run -it {name} /bin/bash'
         )

     def dockerhub_login(self, username: str, password: str):
|
||||||
if dry_run:
|
if dry_run:
|
||||||
print(command)
|
print(command)
|
||||||
else:
|
else:
|
||||||
process = Popen(command, stdout=PIPE, shell=shell)
|
process = Popen(command, shell=shell)
|
||||||
for line in iter(process.stdout.readline, b""):
|
outs, errs = process.communicate()
|
||||||
print(line.decode("utf-8"), end="")
|
while outs is not None:
|
||||||
process.stdout.close()
|
stdout.buffer.write(outs)
|
||||||
return_code = process.wait()
|
if process.returncode != 0:
|
||||||
if return_code != 0:
|
raise RuntimeError(f"Execute live '{command}' failed with code {process.returncode}\nerrs: {errs}")
|
||||||
raise RuntimeError(f"Execute live failed with code: {return_code}")
|
|
||||||
|
|
||||||
|
|
||||||
class EnvironmentApi:
|
class EnvironmentApi:
|
||||||
|
|
|
@@ -1,8 +1,11 @@
+#
+#deprecated, we recommend to use install_functions_debian.sh instead. We will going to remove install_functions.sh in a future release.
+#
 function upgradeSystem() {
-  export DEBIAN_FRONTEND=noninteractive
-  apt-get update > /dev/null
-  apt-get -y install apt-utils > /dev/null
-  apt-get -qqy dist-upgrade > /dev/null
+  {
+    apt-get update
+    apt-get -qqy upgrade
+  } > /dev/null
 }

 function cleanupDocker() {
21  src/main/resources/docker/image/resources/install_functions_alpine.sh  Executable file
@@ -0,0 +1,21 @@
+function upgradeSystem() {
+  apk -U upgrade
+}
+
+function cleanupDocker() {
+  rm -f /root/.ssh/authorized_keys
+  rm -f /root/.ssh/authorized_keys2
+
+  apk cache clean
+
+  rm -rf /tmp/*
+
+  find /var/cache -type f -exec rm -rf {} \;
+  find /var/log/ -name '*.log' -exec rm -f {} \;
+}
+
+function cleanupAmi() {
+  rm -f /home/ubuntu/.ssh/authorized_keys
+  rm -f /home/ubuntu/.ssh/authorized_keys2
+  cleanupDocker
+}
25  src/main/resources/docker/image/resources/install_functions_debian.sh  Executable file
@@ -0,0 +1,25 @@
+function upgradeSystem() {
+  apt-get update
+  apt-get -qqy upgrade
+}
+
+function cleanupDocker() {
+  rm -f /root/.ssh/authorized_keys
+  rm -f /root/.ssh/authorized_keys2
+
+  apt-get clean
+  apt-get -qqy autoremove --purge
+  apt-get -qqy autoclean
+  rm -rf /var/lib/apt/lists/
+
+  rm -rf /tmp/*
+
+  find /var/cache -type f -exec rm -rf {} \;
+  find /var/log/ -name '*.log' -exec rm -f {} \;
+}
+
+function cleanupAmi() {
+  rm -f /home/ubuntu/.ssh/authorized_keys
+  rm -f /home/ubuntu/.ssh/authorized_keys2
+  cleanupDocker
+}
@@ -13,7 +13,7 @@ from src.test.python.domain.helper import (
 from src.main.python.ddadevops.application import ReleaseService


-def test_sould_update_release_type():
+def test_should_update_release_type():
     sut = ReleaseService(
         GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepositoryMock("build.py")
     )
@@ -26,7 +26,7 @@ def test_sould_update_release_type():
         sut.update_release_type(release, "NOT_EXISTING")


-def test_sould_publish_artifacts():
+def test_should_publish_artifacts():
     mock = ArtifactDeploymentApiMock(release='{"id": 2345}')
     sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
     devops = build_devops(
@@ -41,7 +41,7 @@ def test_sould_publish_artifacts():
     sut.publish_artifacts(release)
     assert "http://repo.test/api/v1/repos/orga/repo/releases/2345/assets" == mock.add_asset_to_release_api_endpoint

-def test_sould_throw_exception_if_there_was_an_error_in_publish_artifacts():
+def test_should_throw_exception_if_there_was_an_error_in_publish_artifacts():
     devops = build_devops(
         {
             "release_artifacts": ["target/art"],
@@ -156,6 +156,9 @@ class GitApiMock:
     def push(self):
         pass

+    def push_follow_tags(self):
+        pass
+
     def checkout(self, branch: str):
         pass
@@ -13,7 +13,7 @@ from src.main.python.ddadevops.domain import (
 from .helper import build_devops, devops_config


-def test_sould_validate_release():
+def test_should_validate_release():
     sut = Artifact("x")
     assert sut.is_valid()
@@ -7,7 +7,7 @@ from src.main.python.ddadevops.domain import (
 )


-def test_sould_validate_build_file():
+def test_should_validate_build_file():
     sut = BuildFile(Path("./project.clj"), "content")
     assert sut.is_valid()
@@ -18,7 +18,7 @@ def test_sould_validate_build_file():
     assert not sut.is_valid()


-def test_sould_calculate_build_type():
+def test_should_calculate_build_type():
     sut = BuildFile(Path("./project.clj"), "content")
     assert sut.build_file_type() == BuildFileType.JAVA_CLOJURE
@@ -29,7 +29,7 @@ def test_sould_calculate_build_type():
     assert sut.build_file_type() == BuildFileType.JS


-def test_sould_parse_and_set_js():
+def test_should_parse_and_set_js():
     sut = BuildFile(
         Path("./package.json"),
         """
|
@ -77,7 +77,7 @@ def test_sould_parse_and_set_js():
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_sould_parse_and_set_version_for_gradle():
|
def test_should_parse_and_set_version_for_gradle():
|
||||||
sut = BuildFile(
|
sut = BuildFile(
|
||||||
Path("./build.gradle"),
|
Path("./build.gradle"),
|
||||||
"""
|
"""
|
||||||
|
@ -97,7 +97,7 @@ version = "1.1.5-SNAPSHOT"
|
||||||
assert '\nversion = "2.0.0"\n' == sut.content
|
assert '\nversion = "2.0.0"\n' == sut.content
|
||||||
|
|
||||||
|
|
||||||
def test_sould_parse_and_set_version_for_py():
|
def test_should_parse_and_set_version_for_py():
|
||||||
sut = BuildFile(
|
sut = BuildFile(
|
||||||
Path("./build.py"),
|
Path("./build.py"),
|
||||||
"""
|
"""
|
||||||
|
@ -143,7 +143,7 @@ version = "1.1.5-SNAPSHOT"
|
||||||
assert '\nversion = "2.0.0"\n' == sut.content
|
assert '\nversion = "2.0.0"\n' == sut.content
|
||||||
|
|
||||||
|
|
||||||
def test_sould_parse_and_set_version_for_clj():
|
def test_should_parse_and_set_version_for_clj():
|
||||||
sut = BuildFile(
|
sut = BuildFile(
|
||||||
Path("./project.clj"),
|
Path("./project.clj"),
|
||||||
"""
|
"""
|
||||||
|
@ -182,3 +182,71 @@ def test_sould_parse_and_set_version_for_clj():
|
||||||
'\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n:dependencies [[org.clojure/clojure "1.11.0"]]\n)\n '
|
'\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n:dependencies [[org.clojure/clojure "1.11.0"]]\n)\n '
|
||||||
== sut.content
|
== sut.content
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_should_parse_and_set_version_for_clj_edn():
|
||||||
|
sut = BuildFile(
|
||||||
|
Path("./deps.edn"),
|
||||||
|
"""
|
||||||
|
{:project {:name org.domaindrivenarchitecture/dda-backup
|
||||||
|
:version "1.1.5-SNAPSHOT"}
|
||||||
|
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
|
||||||
|
|
||||||
|
sut = BuildFile(
|
||||||
|
Path("./deps.edn"),
|
||||||
|
"""
|
||||||
|
{:project {:name org.domaindrivenarchitecture/dda-backup
|
||||||
|
:version "1.1.5-SNAPSHOT"}
|
||||||
|
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
sut.set_version(Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT").create_major())
|
||||||
|
assert (
|
||||||
|
'\n{:project {:name org.domaindrivenarchitecture/dda-backup\n :version "2.0.0"}\n\n}\n'
|
||||||
|
== sut.content
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_should_throw_for_clj_wrong_version():
|
||||||
|
sut = BuildFile(
|
||||||
|
Path("./project.clj"),
|
||||||
|
"""
|
||||||
|
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-Snapshot"
|
||||||
|
:description "jira c4k-installation package"
|
||||||
|
:url "https://domaindrivenarchitecture.org"
|
||||||
|
)
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError):
|
||||||
|
sut.get_version()
|
||||||
|
|
||||||
|
def test_should_ignore_first_version_for_py():
|
||||||
|
sut = BuildFile(
|
||||||
|
Path("./build.py"),
|
||||||
|
"""
|
||||||
|
from pybuilder.core import init, use_plugin, Author
|
||||||
|
use_plugin("python.core")
|
||||||
|
|
||||||
|
name = "ddadevops"
|
||||||
|
project_version = "0.0.2-dev1"
|
||||||
|
version = "1.1.5-dev12"
|
||||||
|
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
assert sut.get_version() == Version.from_str("1.1.5-dev12", "dev")
|
||||||
|
|
||||||
|
def test_should_ignore_first_version_for_gradle():
|
||||||
|
sut = BuildFile(
|
||||||
|
Path("./build.gradle"),
|
||||||
|
"""
|
||||||
|
kotlin_version = "3.3.3"
|
||||||
|
version = "1.1.5-SNAPSHOT"
|
||||||
|
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
|
|
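The deps.edn tests above exercise the JAVA_CLOJURE_EDN pattern introduced earlier. A short sketch of what that pattern matches; the constant name and sample string are illustrative, not taken from the repository:

import re

EDN_PATTERN = r"(?P<pre_version>\:version\s+)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
edn = '{:project {:name org.domaindrivenarchitecture/dda-backup\n           :version "1.1.5-SNAPSHOT"}\n}\n'

print(re.search(EDN_PATTERN, edn).group("version"))             # 1.1.5-SNAPSHOT
print(re.sub(EDN_PATTERN, r'\g<pre_version>"2.0.0"', edn, 1))   # bumps only the :version value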
@@ -24,7 +24,7 @@ def test_should_calculate_command():
     assert (
         "provs-server.jar "
        + "k3s "
-       + "k3s_provision_user@example.org "
+       + "k3s_provision_user@::1 "
        + "-c "
        + "root_path/target/name/module/out_k3sServerConfig.yaml "
        + "-a "
@@ -15,7 +15,7 @@ from src.main.python.ddadevops.domain import (
 from .helper import build_devops, devops_config


-def test_sould_validate_release():
+def test_should_validate_release():
     sut = Release(
         devops_config(
             {
|
@ -49,7 +49,7 @@ def test_sould_validate_release():
|
||||||
assert not sut.is_valid()
|
assert not sut.is_valid()
|
||||||
|
|
||||||
|
|
||||||
def test_sould_calculate_build_files():
|
def test_should_calculate_build_files():
|
||||||
sut = Release(
|
sut = Release(
|
||||||
devops_config(
|
devops_config(
|
||||||
{
|
{
|
||||||
|
|
|
@@ -5,7 +5,9 @@ from pybuilder.core import Project

 from src.main.python.ddadevops.release_mixin import ReleaseMixin
 from src.main.python.ddadevops.domain import Devops, Release
-from .domain.helper import devops_config
+from src.main.python.ddadevops.application import ReleaseService
+from src.main.python.ddadevops.infrastructure import BuildFileRepository
+from .domain.helper import devops_config, GitApiMock, ArtifactDeploymentApiMock
 from .resource_helper import copy_resource

|
@ -30,3 +32,37 @@ def test_release_mixin(tmp_path):
|
||||||
|
|
||||||
sut.initialize_build_dir()
|
sut.initialize_build_dir()
|
||||||
assert sut.build_path() == f"{str_tmp_path}/target/name/release-test"
|
assert sut.build_path() == f"{str_tmp_path}/target/name/release-test"
|
||||||
|
|
||||||
|
def test_release_mixin_different_version_suffixes(tmp_path):
|
||||||
|
str_tmp_path = str(tmp_path)
|
||||||
|
copy_resource(Path("config.py"), tmp_path)
|
||||||
|
copy_resource(Path("config.gradle"), tmp_path)
|
||||||
|
|
||||||
|
project = Project(str_tmp_path, name="name")
|
||||||
|
|
||||||
|
os.environ["RELEASE_ARTIFACT_TOKEN"] = "ratoken"
|
||||||
|
|
||||||
|
sut = ReleaseMixin(
|
||||||
|
project,
|
||||||
|
devops_config(
|
||||||
|
{
|
||||||
|
"project_root_path": str_tmp_path,
|
||||||
|
"mixin_types": ["RELEASE"],
|
||||||
|
"build_types": [],
|
||||||
|
"module": "release-test",
|
||||||
|
"release_current_branch": "main",
|
||||||
|
"release_main_branch": "main",
|
||||||
|
"release_primary_build_file": "config.py",
|
||||||
|
"release_secondary_build_files": ["config.gradle"],
|
||||||
|
}
|
||||||
|
),
|
||||||
|
)
|
||||||
|
sut.release_service = ReleaseService(GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepository(project.basedir))
|
||||||
|
|
||||||
|
sut.initialize_build_dir()
|
||||||
|
sut.update_release_type("PATCH")
|
||||||
|
sut.prepare_release()
|
||||||
|
sut.tag_bump_and_push_release()
|
||||||
|
|
||||||
|
assert sut.release_service.build_file_repository.get(Path("config.py")).get_version().to_string() == "3.1.5-dev"
|
||||||
|
assert sut.release_service.build_file_repository.get(Path("config.gradle")).get_version().to_string() == "3.1.5-SNAPSHOT"
|