Compare commits


4 commits

SHA1 Message Date
c182505b76 Handle exceptions and print stderr 2023-07-12 13:36:23 +02:00
07cf60ba59 Remove unnecessary test 2023-07-12 13:35:36 +02:00
8cb17dea54 Test if stderr is in output 2023-07-12 13:02:03 +02:00
0a9efe85d2 Make minimal image ci 2023-07-12 13:01:10 +02:00
82 changed files with 875 additions and 2238 deletions

.github/workflows/stable.yml vendored Normal file

@ -0,0 +1,42 @@
name: stable

on:
  push:
    tags:
      - '[0-9]+.[0-9]+.[0-9]+'

jobs:
  build:
    name: stable build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Use python 3.x
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'

      - name: install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: build stable release
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
        run: |
          pyb -P version=${{ github.ref }} publish upload

      - name: Create GH Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: Release ${{ github.ref }}
          draft: false
          prerelease: false

.github/workflows/unstable.yml vendored Normal file

@ -0,0 +1,30 @@
name: unstable

on:
  push:
    tags:
      - '![0-9]+.[0-9]+.[0-9]+'

jobs:
  build:
    name: unstable
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Use python 3.x
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'

      - name: install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: build unstable release
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
        run: |
          pyb publish upload

.gitignore vendored

@ -109,6 +109,3 @@ venv.bak/
 .clj-kondo/
 .lsp/
-.calva/
-.cpcache/
-infrastructure/backup/image/resources/backup-repository-state.edn

.gitlab-ci.yml

@ -1,89 +1,17 @@
-stages:
-  - lint&test
-  - upload
-  - image
-
-.py: &py
-  image: "domaindrivenarchitecture/ddadevops-python:4.10.7"
-  before_script:
-    - export RELEASE_ARTIFACT_TOKEN=$MEISSA_REPO_BUERO_RW
-    - python --version
-    - pip install -r requirements.txt
-
-.img: &img
-  image: "domaindrivenarchitecture/ddadevops-dind:4.10.7"
-  services:
-    - docker:dind
-  before_script:
-    - export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
-    - export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
-    - export IMAGE_TAG=$CI_COMMIT_TAG
-
-.tag_only: &tag_only
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-      when: never
-    - if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
-
-lint:
-  <<: *py
-  stage: lint&test
-  script:
-    - pip install -r dev_requirements.txt
-    - pyb lint
-
-pytest:
-  <<: *py
-  stage: lint&test
-  script:
-    - pip install -r dev_requirements.txt
-    - pyb test
-
-pypi-stable:
-  <<: *py
-  <<: *tag_only
-  stage: upload
-  script:
-    - pyb -P version=$CI_COMMIT_TAG publish upload publish_artifacts
-
-clj-cljs-image-publish:
-  <<: *img
-  <<: *tag_only
-  stage: image
-  script:
-    - cd infrastructure/clj-cljs && pyb image publish
-
-clj-image-publish:
-  <<: *img
-  <<: *tag_only
-  stage: image
-  script:
-    - cd infrastructure/clj && pyb image publish
-
-python-image-publish:
-  <<: *img
-  <<: *tag_only
-  stage: image
-  script:
-    - cd infrastructure/python && pyb image publish
-
-dind-image-publish:
-  <<: *img
-  <<: *tag_only
-  stage: image
-  script:
-    - cd infrastructure/dind && pyb image publish
-
-ddadevops-image-publish:
-  <<: *img
-  <<: *tag_only
-  stage: image
-  script:
-    - cd infrastructure/ddadevops && pyb image publish
-
-kotlin-image-publish:
-  <<: *img
-  <<: *tag_only
-  stage: image
-  script:
-    - cd infrastructure/kotlin && pyb image publish
+image: "domaindrivenarchitecture/devops-build:4.0.8"
+
+before_script:
+  - python --version
+  - python -m pip install --upgrade pip
+  - pip install -r requirements.txt
+  - export IMAGE_TAG=$CI_IMAGE_TAG
+  - export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
+  - export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
+
+stages:
+  - image
+
+devops-build-image-test-publish:
+  stage: image
+  script:
+    - cd infrastructure/devops-build && pyb image

README.md

@ -1,7 +1,8 @@
 # dda-devops-build
 
-[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://domaindrivenarchitecture.org/img/delta-chat.svg" width=20 alt="DeltaChat"> chat over e-mail](mailto:buero@meissa-gmbh.de?subject=community-chat) | [<img src="https://meissa.de/images/parts/contact/mastodon36_hue9b2464f10b18e134322af482b9c915e_5501_filter_14705073121015236177.png" width=20 alt="M"> meissa@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@meissa) | [Blog](https://domaindrivenarchitecture.org) | [Website](https://meissa.de)
+[![Slack](https://img.shields.io/badge/chat-clojurians-green.svg?style=flat)](https://clojurians.slack.com/messages/#dda-pallet/) | [<img src="https://meissa-gmbh.de/img/community/Mastodon_Logotype.svg" width=20 alt="team@social.meissa-gmbh.de"> team@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@team) | [Website & Blog](https://domaindrivenarchitecture.org)
+![release prod](https://github.com/DomainDrivenArchitecture/dda-devops-build/workflows/release%20prod/badge.svg)
 
 dda-devops-build integrates all the tools we use to work with clouds & provide some nice functions around.
@ -69,12 +70,12 @@ classDiagram
   DevopsBuild <|-- ProvsK3sBuild
   DevopsBuild <|-- C4kBuild
 
-  link DevopsBuild "dda-devops-build/src/doc/DevopsBuild.md"
-  link DevopsImageBuild "dda-devops-build/src/doc/DevopsImageBuild.md"
-  link DevopsTerraformBuild "dda-devops-build/src/doc/DevopsTerraformBuild.md"
-  link ReleaseMixin "dda-devops-build/src/doc/ReleaseMixin.md"
-  link ProvsK3sBuild "dda-devops-build/src/doc/ProvsK3sBuild.md"
-  link C4kBuild "dda-devops-build/src/doc/C4kBuild.md"
+  link DevopsBuild "./doc/DevopsBuild.md"
+  link DevopsImageBuild "./doc/DevopsImageBuild.md"
+  link DevopsTerraformBuild "./doc/DevopsTerraformBuild.md"
+  link ReleaseMixin "./doc/ReleaseMixin.md"
+  link ProvsK3sBuild "doc/ProvsK3sBuild.md"
+  link C4kBuild "doc/C4kBuild.md"
 ```
@ -83,10 +84,6 @@ Principles we follow are:
 * Seperate build artefacts from version controlled code
 * Domain Driven Design - in order to stay sustainable
 
-## Example Project
-
-An example project which is using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
-
 ## Installation
 
 Ensure that yout python3 version is at least Python 3.10
@ -97,9 +94,17 @@ pip3 install -r requirements.txt
 export PATH=$PATH:~/.local/bin
 ```
 
-## Example Project
+## Reference
 
-An example project which is using dda-devops-build can be found at: https://repo.prod.meissa.de/meissa/buildtest
+* [DevopsBuild](./doc/DevopsBuild.md)
+* [DevopsImageBuild](./doc/DevopsImageBuild.md)
+* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
+* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
+* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
+* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
+* [ReleaseMixin](./doc/ReleaseMixin.md)
+* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
+* [C4kBuild](doc/C4kBuild.md)
 
 ## Example Build
@ -181,23 +186,10 @@ def destroy(project):
 pyb dev publish upload
 pip3 install --upgrade ddadevops --pre
 
-pyb [patch|minor|major]
+pyb [patch|minor|major] prepare_release tag_bump_and_push_release
 pip3 install --upgrade ddadevops
 ```
 
-## Reference
-
-* [DevopsBuild](./doc/DevopsBuild.md)
-* [DevopsImageBuild](./doc/DevopsImageBuild.md)
-* [DevopsTerraformBuild](./doc/DevopsTerraformBuild.md)
-* [AwsProvider](doc/DevopsTerraformBuildWithAwsProvider.md)
-* [DigitaloceanProvider](doc/DevopsTerraformBuildWithDigitaloceanProvider.md)
-* [HetznerProvider](doc/DevopsTerraformBuildWithHetznerProvider.md)
-* [ReleaseMixin](./doc/ReleaseMixin.md)
-* [ProvsK3sBuild](doc/ProvsK3sBuild.md)
-* [C4kBuild](doc/C4kBuild.md)
-
 ## Development & mirrors
 
 Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
@ -205,7 +197,6 @@ Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
 Mirrors are:
 
 * https://gitlab.com/domaindrivenarchitecture/dda-devops-build (issues and PR, CI)
-* https://github.com/DomainDrivenArchitecture/dda-devops-build
 
 For more details about our repository model see: https://repo.prod.meissa.de/meissa/federate-your-repos
@ -213,3 +204,8 @@ For more details about our repository model see: https://repo.prod.meissa.de/mei
 Copyright © 2021 meissa GmbH
 Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")
+
+## License
+
+Copyright © 2023 meissa GmbH
+Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")

build.py

@ -22,71 +22,63 @@ from ddadevops import *
 use_plugin("python.core")
 use_plugin("copy_resources")
 use_plugin("filter_resources")
-# use_plugin("python.unittest")
-# use_plugin("python.coverage")
+#use_plugin("python.unittest")
+#use_plugin("python.coverage")
 use_plugin("python.distutils")
-# use_plugin("python.install_dependencies")
+#use_plugin("python.install_dependencies")
 
 default_task = "dev"
 
 name = "ddadevops"
 MODULE = "not-used"
 PROJECT_ROOT_PATH = "."
-version = "4.13.2-dev"
+version = "4.0.12-dev"
 summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
 description = __doc__
 authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]
 url = "https://repo.prod.meissa.de/meissa/dda-devops-build"
 requires_python = ">=3.10"  # CHECK IF NEW VERSION EXISTS
 license = "Apache Software License"
 
 @init
 def initialize(project):
-    # project.build_depends_on('mockito')
-    # project.build_depends_on('unittest-xml-reporting')
-    project.build_depends_on("ddadevops>=4.7.0")
+    #project.build_depends_on('mockito')
+    #project.build_depends_on('unittest-xml-reporting')
+    project.build_depends_on("ddadevops>=4.0.0")
 
     project.set_property("verbose", True)
-    project.get_property("filter_resources_glob").append(
-        "main/python/ddadevops/__init__.py"
-    )
+    project.get_property("filter_resources_glob").append("main/python/ddadevops/__init__.py")
     project.set_property("dir_source_unittest_python", "src/test/python")
 
     project.set_property("copy_resources_target", "$dir_dist/ddadevops")
     project.get_property("copy_resources_glob").append("LICENSE")
     project.get_property("copy_resources_glob").append("src/main/resources/terraform/*")
-    project.get_property("copy_resources_glob").append(
-        "src/main/resources/docker/image/resources/*"
-    )
+    project.get_property("copy_resources_glob").append("src/main/resources/docker/image/resources/*")
     project.include_file("ddadevops", "LICENSE")
     project.include_file("ddadevops", "src/main/resources/terraform/*")
     project.include_file("ddadevops", "src/main/resources/docker/image/resources/*")
 
-    # project.set_property('distutils_upload_sign', True)
-    # project.set_property('distutils_upload_sign_identity', '')
+    #project.set_property('distutils_upload_sign', True)
+    #project.set_property('distutils_upload_sign_identity', '')
     project.set_property("distutils_readme_description", True)
     project.set_property("distutils_description_overwrite", True)
-    project.set_property(
-        "distutils_classifiers",
-        [
-            "License :: OSI Approved :: Apache Software License",
-            "Programming Language :: Python",
-            "Programming Language :: Python :: 3",
-            "Programming Language :: Python :: 3.8",
-            "Programming Language :: Python :: 3.10",
-            "Operating System :: POSIX :: Linux",
-            "Operating System :: OS Independent",
-            "Development Status :: 5 - Production/Stable",
-            "Environment :: Console",
-            "Intended Audience :: Developers",
-            "License :: OSI Approved :: Apache Software License",
-            "Topic :: Software Development :: Build Tools",
-            "Topic :: Software Development :: Quality Assurance",
-            "Topic :: Software Development :: Testing",
-        ],
-    )
+    project.set_property("distutils_classifiers", [
+        'License :: OSI Approved :: Apache Software License',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.10',
+        'Operating System :: POSIX :: Linux',
+        'Operating System :: OS Independent',
+        'Development Status :: 5 - Production/Stable',
+        'Environment :: Console',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: Apache Software License',
+        'Topic :: Software Development :: Build Tools',
+        'Topic :: Software Development :: Quality Assurance',
+        'Topic :: Software Development :: Testing'
+    ])
 
     input = {
         "name": name,
@ -96,99 +88,59 @@ def initialize(project):
         "build_types": [],
         "mixin_types": ["RELEASE"],
         "release_primary_build_file": "build.py",
-        "release_secondary_build_files": [
-            "infrastructure/python/build.py",
-            "infrastructure/dind/build.py",
-            "infrastructure/ddadevops/build.py",
-            "infrastructure/clj-cljs/build.py",
-            "infrastructure/clj/build.py",
-            "infrastructure/kotlin/build.py",
-        ],
-        "release_artifacts": [],
-        "release_artifact_server_url": "https://repo.prod.meissa.de",
-        "release_organisation": "meissa",
-        "release_repository_name": "dda-devops-build",
     }
 
     build = ReleaseMixin(project, input)
     build.initialize_build_dir()
 
 @task
 def test(project):
     run("pytest", check=True)
 
 @task
 def lint(project):
-    run(
-        "flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 "
-        + "--show-source --statistics src/main/python/ddadevops/",
-        shell=True,
-        check=True,
-    )
-    run(
-        "flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 "
-        + '--per-file-ignores="__init__.py:F401" '
-        + "--ignore=E722,W503 --statistics src/main/python/ddadevops/",
-        shell=True,
-        check=True,
-    )
-    run(
-        "python -m mypy src/main/python/ddadevops/ --ignore-missing-imports "
-        + "--disable-error-code=attr-defined --disable-error-code=union-attr",
-        shell=True,
-        check=True,
-    )
+    run("flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 "+
+        "--show-source --statistics src/main/python/ddadevops/", shell=True, check=True)
+    run("flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 "+
+        "--per-file-ignores=\"__init__.py:F401\" "+
+        "--ignore=E722,W503 --statistics src/main/python/ddadevops/", shell=True, check=True)
+    run("python -m mypy src/main/python/ddadevops/ --ignore-missing-imports "+
+        "--disable-error-code=attr-defined --disable-error-code=union-attr", shell=True, check=True)
+    run("pylint -d W0511,R0903,C0301,W0614,C0114,C0115,C0116,similarities,W1203,W0702,W0702,"+
+        "R0913,R0902,R0914,R1732,R1705,W0707,C0123,W0703,C0103 src/main/python/ddadevops/", shell=True, check=True)
 
 @task
 def patch(project):
-    linttest(project, "PATCH")
-    release(project)
+    build(project, "PATCH")
 
 @task
 def minor(project):
-    linttest(project, "MINOR")
-    release(project)
+    build(project, "MINOR")
 
 @task
 def major(project):
-    linttest(project, "MAJOR")
-    release(project)
+    build(project, "MAJOR")
 
 @task
 def dev(project):
-    linttest(project, "NONE")
+    build(project, "NONE")
 
 @task
-def prepare(project):
+def nothing(project):
+    pass
+
+@task
+def prepare_release(project):
     build = get_devops_build(project)
     build.prepare_release()
 
 @task
-def tag(project):
+def tag_bump_and_push_release(project):
     build = get_devops_build(project)
     build.tag_bump_and_push_release()
 
-@task
-def publish_artifacts(project):
-    build = get_devops_build(project)
-    build.publish_artifacts()
-
-def release(project):
-    prepare(project)
-    tag(project)
-
-def linttest(project, release_type):
+def build(project, release_type):
     build = get_devops_build(project)
     build.update_release_type(release_type)
     test(project)


@ -35,12 +35,7 @@ classDiagram
 | name | name in context of build & ENV | - |
 
 ## Example Usage
 
-### Example project
-
-A complete example project you can find on: https://repo.prod.meissa.de/meissa/buildtest
-
-### Example of a build.py
+### build.py
 
 ```python
 from os import environ


@ -23,9 +23,12 @@ classDiagram
 | build_dir_name | name of dir, build is executed in | target |
 | build_types | list of special builds used. Valid values are ["IMAGE", "C4K", "K3S", "TERRAFORM"] | [] |
 | mixin_types | mixins are orthoganl to builds and represent additional capabilities. Valid Values are ["RELEASE"] | [] |
+| module | module name - may result in a hierarchy like name/module | |
+| name | dedicated name of the build | module |
+| project_root_path | relative path to projects root. Is used to locate the target dir | |
+| stage | sth. like test, int, acc or prod | |
 
 ## Example Usage
 
 ### build.py
 
 ```python


@ -30,7 +30,7 @@ classDiagram
 | image_dockerhub_user | user to access docker-hub | IMAGE_DOCKERHUB_USER from env or credentials from gopass |
 | image_dockerhub_password | password to access docker-hub | IMAGE_DOCKERHUB_PASSWORD from env or credentials from gopass |
 | image_tag | tag for publishing the image | IMAGE_TAG from env |
-| image_naming | Strategy for calculate the image name. Posible values are [NAME_ONLY,NAME_AND_MODULE] | NAME_ONLY |
 
 ### Credentials Mapping defaults


@ -1,33 +0,0 @@
# ddadevops Images
## ddadevops-clojure
Contains
* clojure
* shadowcljs
* lein
* java
* graalvm
* pybuilder, ddadevops
## ddadevops
Contains:
* pybuilder, ddadevops
## devops-build
Image is deprecated.
## ddadevops-dind
Contains:
* docker in docker
* pybuilder, ddadevops
## ddadevops-python
Contains:
* python 3.10
* python linting
* python setup-tools
* pybuilder, ddadevops


@ -13,17 +13,14 @@ classDiagram
 ## Input
 
 | name | description | default |
-| --------------------------------- | ----------------------------------------------------------------- | --------- |
+| ----------------------------- | ----------------------------------------------------------------- | --------- |
 | k3s_provision_user | the user used to provision k3s | "root" |
 | k3s_letsencrypt_email | email address used for letsencrypt | |
 | k3s_letsencrypt_endpoint | letsencrypt endpoint. Valid values are staging, prod | "staging" |
 | k3s_app_filename_to_provision | an k8s manifest to apply imediately after k3s setup was sucessful | |
 | k3s_enable_echo | provision the echo app on k3s. Valid values are true, false | "false" |
 | k3s_provs_template | use a individual template for provs config | None |
-| k3s_enable_hetzner_csi | enable hetzner csi | False |
-| k3s_hetzner_api_token | hetzner_api_token | None |
-| k3s_hetzner_encryption_passphrase | encryption passphrase for volumes | None |
 
 ### Credentials Mapping defaults

doc/ReleaseMixin.md

@ -1,15 +1,5 @@
 # ReleaseMixin
 
-- [ReleaseMixin](#releasemixin)
-  - [Input](#input)
-  - [Example Usage just for creating releases](#example-usage-just-for-creating-releases)
-    - [build.py](#buildpy)
-    - [call the build for creating a major release](#call-the-build-for-creating-a-major-release)
-  - [Example Usage for creating a release on forgejo / gitea \& upload the generated artifacts](#example-usage-for-creating-a-release-on-forgejo--gitea--upload-the-generated-artifacts)
-    - [build.py](#buildpy-1)
-    - [call the build](#call-the-build)
-
 Support for releases following the trunk-based-release flow (see https://trunkbaseddevelopment.com/)
 
 ```mermaid
@ -18,7 +8,6 @@ classDiagram
     prepare_release() - adjust all build files to carry the correct version & commit locally
     tag_and_push_release() - tag the git repo and push changes to origin
     update_release_type (release_type) - change the release type during run time
-    publish_artifacts() - publish release & artifacts to forgejo/gitea
   }
 ```
@ -26,103 +15,13 @@
 ## Input
 
 | name | description | default |
-| ----------------------------- |-------------------------------------------------------------------------------------------------------------------| --------------- |
+| ----------------------------- | ------------------------------------------------------------------------------------------------------------------- | --------------- |
 | release_type | one of MAJOR, MINOR, PATCH, NONE | "NONE" |
-| release_main_branch | the name of your trunk | "main" |
+| release_main_branch | the name of your trank | "main" |
 | release_primary_build_file | path to the build file having the leading version info (read & write). Valid extensions are .clj, .json, .gradle, .py | "./project.clj" |
 | release_secondary_build_files | list of secondary build files, version is written in. | [] |
-| release_artifact_server_url | Optional: The base url of your forgejo/gitea instance to publish a release tode | |
-| release_organisation | Optional: The repository organisation name | |
-| release_repository_name | Optional: The repository name name | |
-| release_artifacts | Optional: The list of artifacts to publish to the release generated name | [] |
-| release_tag_prefix | Optional: Prefix of tag | "" |
 
-## Example Usage just for creating releases
+## Example Usage
 
-### build.py
-
-```python
-from os import environ
-from pybuilder.core import task, init
-from ddadevops import *
-
-name = 'my-project'
-MODULE = 'my-module'
-PROJECT_ROOT_PATH = '..'
-
-@init
-def initialize(project):
-    project.build_depends_on("ddadevops>=4.7.0")
-
-    input = {
-        "name": name,
-        "module": MODULE,
-        "stage": "notused",
-        "project_root_path": PROJECT_ROOT_PATH,
-        "build_types": [],
-        "mixin_types": ["RELEASE"],
-        "release_type": "MINOR",
-        "release_primary_build_file": "project.clj",
-        "release_secondary_build_files": ["package.json"],
-    }
-    build = ReleaseMixin(project, input)
-    build.initialize_build_dir()
-
-@task
-def patch(project):
-    linttest(project, "PATCH")
-    release(project)
-
-@task
-def minor(project):
-    linttest(project, "MINOR")
-    release(project)
-
-@task
-def major(project):
-    linttest(project, "MAJOR")
-    release(project)
-
-@task
-def dev(project):
-    linttest(project, "NONE")
-
-@task
-def prepare(project):
-    build = get_devops_build(project)
-    build.prepare_release()
-
-@task
-def tag(project):
-    build = get_devops_build(project)
-    build.tag_bump_and_push_release()
-
-def release(project):
-    prepare(project)
-    tag(project)
-
-def linttest(project, release_type):
-    build = get_devops_build(project)
-    build.update_release_type(release_type)
-    #test(project)
-    #lint(project)
-```
-
-### call the build for creating a major release
-
-```bash
-pyb major
-```
-
-## Example Usage for creating a release on forgejo / gitea & upload the generated artifacts
-
 ### build.py
@ -137,7 +36,7 @@ PROJECT_ROOT_PATH = '..'
 
 @init
 def initialize(project):
-    project.build_depends_on("ddadevops>=4.7.0")
+    project.build_depends_on("ddadevops>=4.0.0")
 
     input = {
         "name": name,
@ -149,23 +48,35 @@ def initialize(project):
         "release_type": "MINOR",
         "release_primary_build_file": "project.clj",
         "release_secondary_build_files": ["package.json"],
-        "release_artifact_server_url": "https://repo.prod.meissa.de",
-        "release_organisation": "meissa",
-        "release_repository_name": "dda-devops-build",
-        "release_artifacts": ["target/doc.zip"],
     }
+    roject.build_depends_on("ddadevops>=4.0.0-dev")
 
     build = ReleaseMixin(project, input)
     build.initialize_build_dir()
 
 @task
-def publish_artifacts(project):
+def prepare_release(project):
     build = get_devops_build(project)
-    build.publish_artifacts()
+    build.prepare_release()
+
+@task
+def build(project):
+    print("do the build")
+
+@task
+def publish(project):
+    print("publish your artefacts")
+
+@task
+def after_publish(project):
+    build = get_devops_build(project)
+    build.tag_bump_and_push_release()
 ```
 
 ### call the build
 
 ```bash
-git checkout "4.7.0"
-pyb publish_artifacts
+pyb prepare_release build publish after_publish
 ```


@ -12,7 +12,6 @@ classDiagram
   }
   class Image {
-    image_naming
     image_dockerhub_user
     image_dockerhub_password
     image_publish_tag
@ -88,15 +87,6 @@ classDiagram
     release_type
     release_main_branch
     release_current_branch
-    release_artifact_server_url
-    release_organisation
-    release_repository_name
-    release_artifact_token
-  }
-  class Artifact {
-    path_str
-    path()
-    type()
   }
   class Credentials {
     <<AggregateRoot>>
@ -139,7 +129,6 @@ classDiagram
   TerraformDomain *-- "0..1" ProviderAws: providers
   Release o-- "0..1" BuildFile: primary_build_file
   Release o-- "0..n" BuildFile: secondary_build_files
-  Release "1" *-- "0..n" Artifact: release_artifacts
   Release "1" *-- "1" Version: version
   BuildFile *-- "1" Version: version
   C4k *-- DnsRecord: dns_record


@ -16,85 +16,35 @@ We discussed how we will handle releases in cooperation with gitlab-ci.
 
 ### Outcome of Eventstroming: Events ordered by time
 
-* B: is the human devops
-* S: is the build / ci system
-
-```mermaid
-stateDiagram-v2
-    state prepare_release {
-        state "B: Pulls the latest changes" as pull
-        state "B: Possibly merge/rebase with main" as merge
-        state "B: starts 'create release-notes'" as rn
-        state "B: commits his changes with [skip-ci]." as c1
-
-        [*] --> pull
-        pull --> merge
-        merge --> rn
-        rn --> c1
-        c1 --> [*]
-    }
-    state release {
-        state "B: starts the release build and specifies major, minor, patch" as trigger
-        state "S: does a git fetch & status and checks if there are no changes at origin" as fetch
-        state "S: starts tests" as test
-        state "S: runs the linting" as lint
-        state "S: possibly does image building and image testing" as image
-        state "S: version numbers are adjusted in project.clj/package.json to full version" as vno
-        state "S: change commit is tagged with git tag" as c2
-        state "S: version numbers are adjusted in project.clj/package.json to next snapshot " as snap
-        state "S: makes a bump commit with [skip-ci]." as c3
-        state "S: push to gitlab/gitea along with git tags" as push
-
-        [*] --> trigger
-        trigger --> fetch
-        fetch --> lint
-        fetch --> test
-        fetch --> image
-        test --> vno
-        lint --> vno
-        image --> vno
-        vno --> c2
-        c2 --> snap
-        snap --> c3
-        c3 --> push
-        push --> [*]
-    }
-    state ci_tag {
-        state "S: CI starts - for a new tag" as ct1
-        state "S: runs the linting" as ct2
-        state "S: CI runs tests" as ct3
-        state "S: makes artifacts" as ct4
-        state "S: possibly performs image building and image testing" as ct5
-        state "S: publishes images and artifacts" as ct6
-        [*] --> ct1
-        ct1 --> ct2
-        ct2 --> ct3
-        ct3 --> ct4
-        ct4 --> ct5
-        ct5 --> ct6
-        ct6 --> [*]
-    }
-    state ci_version_bump {
-        state "S: CI starts - for push with the last commit" as cvb1
-        state "S: CI runs tests" as cvb2
-        state "S: performs the linting" as cvb3
-        [*] --> cvb1
-        cvb1 --> cvb2
-        cvb2 --> cvb3
-        cvb3 --> [*]
-    }
-    [*] --> prepare_release
-    prepare_release --> release
-    release --> ci_tag
-    release --> ci_version_bump
-    ci_tag --> [*]
-    ci_version_bump --> [*]
-```
+1. B: Pulls the latest changes
+1. B: Possibly merge/rebase with main
+1.
+1. B: starts "create release-notes"
+1. B: commits his changes with [skip-ci].
+1.
+1. B: starts the release build and specifies major, minor, patch
+1.
+1. S: does a git fetch & status and checks if there are no changes at origin
+1. S: starts tests
+1. S: runs the linting
+1. S: possibly does image building and image testing
+1.
+1. S: version numbers are adjusted in project.clj/package.json to full version
+1. S: change commit is tagged with git tag
+1. S: version numbers are adjusted in project.clj/package.json to next snapshot version
+1. S: makes a bump commit with [skip-ci].
+1. S: push to gitlab/gitea along with git tags
+1.
+1. S: CI starts - for a new tag
+1. S: CI runs tests
+1. S: runs the linting
+1. S: makes artifacts
+1. S: possibly performs image building and image testing
+1. S: publishes images and artifacts
+1.
+1. S: CI starts - for push with the last commit
+1. S: CI runs tests
+1. S: performs the linting
 
 ## Consequences

doc/dev/releasing.md Normal file

@ -0,0 +1,8 @@
adjust version no in build.py to release version no.
git commit -am "release"
git tag -am "release" [release version no]
git push --follow-tags
increase version no in build.py
git commit -am "version bump"
git push
pip3 install --upgrade ddadevops
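
A concrete walk-through of these steps, as a hedged sketch: the version numbers are hypothetical (the build.py in this compare carries version = "4.0.12-dev"), and the sed calls are just one way to adjust the version line.

```bash
# 1. set the release version in build.py, commit, tag and push
sed -i 's/version = "4.0.12-dev"/version = "4.0.12"/' build.py
git commit -am "release"
git tag -am "release" 4.0.12
git push --follow-tags

# 2. reopen development with the next snapshot version
sed -i 's/version = "4.0.12"/version = "4.0.13-dev"/' build.py
git commit -am "version bump"
git push

# 3. once the release is published, pull it locally
pip3 install --upgrade ddadevops
```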


@ -1,56 +0,0 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
import logging

name = 'dda-backup'
MODULE = 'NOT_SET'
PROJECT_ROOT_PATH = '../..'
version = "4.12.2-dev"


@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_ONLY",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.7.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()


@task
def image(project):
    build = get_devops_build(project)
    build.image()


@task
def test(project):
    build = get_devops_build(project)
    build.test()


@task
def drun(project):
    build = get_devops_build(project)
    build.drun()


@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()


@ -1,79 +0,0 @@
## Init Statemachine
### Inputs
1. `restic-password: ""`
2. `restic-password-to-rotate: ""`
### Manual init the restic repository for the first time
1. apply backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=1`
2. exec into the pod and run the init script (press tab to complete your exact pod name)
`kubectl exec -it backup-restore-... -- /usr/local/bin/init.sh`
3. remove backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=0`
### Password Rotation
1. apply backup-and-restore pod:
`kubectl scale deployment backup-restore --replicas=1`
2. add new password to restic repository
`restic key add ....`
=> Trigger ::
field (1) credential current
field (2) credential new
3. replace field (1) with (2) & clear (2)
4. remove old key - ???
`restic remove ....`
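
A hedged sketch of steps 2 and 4 with plain restic commands: the secret paths follow the mounts used elsewhere in this repo, and the key id is a placeholder you would read from `restic key list`.

```bash
# assumes RESTIC_REPOSITORY is exported and the current password is mounted
export RESTIC_PASSWORD_FILE=/var/run/secrets/backup-secrets/restic-password

# step 2: add the rotation credential as an additional repository key
restic key add \
  --new-password-file /var/run/secrets/rotation-credential-secret/rotation-credential

# step 4: after the secret fields were swapped, drop the old key
restic key list
restic key remove <old-key-id>
```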
```mermaid
stateDiagram-v2
[*] --> init
init --> backup_ready: trigger, restic-password !empty
backup_ready --> new_password_added: restic-password !empty && restic-password-to-rotate !empty
new_password_added --> backup_ready: restic-password !empty && restic-password-to-rotate empty
```
### First Steps
1. Start the cloud test server
2. Deploy backup-restore there (empty Secret possible?), with a new Secret "rotation-credential-secret" as data
3. Mount the created Secret into the backup-restore pod
4. Start the ba*bash*ka script in the pod -> reads the Secret, possibly empty
5. Consult Micha.
```mermaid
sequenceDiagram
participant k8s
participant e as entrypoint.sh
participant rm as restic-management.clj
k8s ->> e: cronjob calls
e ->> rm: start-file
rm ->> rm: rotate
activate rm
rm ->> rm: read-backup-repository-state (state)
rm ->> rm: read-secret (backup-secret/restic-password, rotation-credential-secret/rotation-credential)
rm ->> rm: switch
activate rm
rm ->> rm: if init && restic-password != null
activate rm
rm ->> rm: init.sh
rm ->> rm: state init -> backup-ready
deactivate rm
rm ->> rm: if backup-ready && rotation-credential != null
activate rm
rm ->> rm: add-new-password-to-restic-repository.sh
rm ->> rm: state backup-ready -> new-password-added
deactivate rm
rm ->> rm: if new-password-added && rotation-credential == null
activate rm
rm ->> rm: remove-old-password-from-restic-repository.sh
rm ->> rm: state new-password-added -> backup-ready
deactivate rm
deactivate rm
rm ->> rm: store-repository-state (state)
deactivate rm
```


@ -1,5 +0,0 @@
FROM ubuntu:jammy
# install it
ADD resources /tmp/
RUN /tmp/install.sh


@ -1,70 +0,0 @@
backup_file_path='files'
function init-file-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init
else
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} -v init --cacert ${CERTIFICATE_FILE}
fi
}
# First arg is the directory, second is optional for the path to a certificate file
function backup-directory() {
local directory="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup .
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup . --cacert ${CERTIFICATE_FILE}
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
fi
}
# First arg is the directory, the remaining args are the sub-directories (relative to the first directory) to backup.
function backup-fs-from-directory() {
local directory="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
cd ${directory} && restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} backup $@ --cacert ${CERTIFICATE_FILE}
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune --cacert ${CERTIFICATE_FILE}
fi
}
# This does not work like this!
function restore-directory() {
local directory="$1"; shift
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache
rm -rf ${directory}*
restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory}
else
restic -v -r ${RESTIC_REPOSITORY}/${backup_file_path} unlock --cleanup-cache --cacert ${CERTIFICATE_FILE}
rm -rf ${directory}*
restic -v -r $RESTIC_REPOSITORY/${backup_file_path} restore ${snapshot_id} --target ${directory} --cacert ${CERTIFICATE_FILE}
fi
}
function list-snapshot-files() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_file_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}
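
A usage sketch with hypothetical values: the functions above expect the following environment variables (plus restic's own RESTIC_PASSWORD or RESTIC_PASSWORD_FILE), and the install script places the file under /usr/local/lib/.

```bash
# hypothetical values, for illustration only
export RESTIC_REPOSITORY="rest:https://backup.example.org/repo"
export RESTIC_DAYS_TO_KEEP=7
export RESTIC_MONTHS_TO_KEEP=3
export RESTIC_PASSWORD_FILE=/var/run/secrets/backup-secrets/restic-password

source /usr/local/lib/file-functions.sh
init-file-repo                 # once, to create the repository
backup-directory /var/backups  # snapshot the directory, then prune old snapshots
list-snapshot-files
```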


@ -1,21 +0,0 @@
# usage: file_env VAR [DEFAULT]
# ie: file_env 'XYZ_DB_PASSWORD' 'example'
# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
# "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
function file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}
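
A small usage sketch based on the header comment above (the file and variable names are made up):

```bash
# with a secret mounted at /run/secrets/xyz_db_password ...
export XYZ_DB_PASSWORD_FILE=/run/secrets/xyz_db_password

# ... file_env fills $XYZ_DB_PASSWORD from that file (or the default)
file_env 'XYZ_DB_PASSWORD' 'example-default'
echo "$XYZ_DB_PASSWORD"
```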


@ -1,36 +0,0 @@
#!/bin/bash
set -exo pipefail
function babashka_install() {
babashka_version="1.3.189"
curl -SsLo /tmp/babashka-${babashka_version}-linux-amd64.tar.gz https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz
curl -SsLo /tmp/checksum https://github.com/babashka/babashka/releases/download/v${babashka_version}/babashka-${babashka_version}-linux-amd64.tar.gz.sha256
echo " /tmp/babashka-$babashka_version-linux-amd64.tar.gz"|tee -a /tmp/checksum
sha256sum -c --status /tmp/checksum
tar -C /tmp -xzf /tmp/babashka-${babashka_version}-linux-amd64.tar.gz
install -m 0700 -o root -g root /tmp/bb /usr/local/bin/
}
function main() {
{
upgradeSystem
apt-get install -qqy ca-certificates curl gnupg postgresql-client-14 restic
curl -Ss --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg
sh -c 'echo "deb [signed-by=/etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg] https://apt.postgresql.org/pub/repos/apt jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
upgradeSystem
babashka_install
} > /dev/null
update-ca-certificates
install -m 0400 /tmp/functions.sh /usr/local/lib/
install -m 0400 /tmp/pg-functions.sh /usr/local/lib/
install -m 0400 /tmp/file-functions.sh /usr/local/lib/
install -m 0740 /tmp/restic_management.clj /usr/local/bin/
cleanupDocker
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main


@ -1,149 +0,0 @@
backup_pg_role_path='pg-role'
backup_pg_database_path='pg-database'
function init-command() {
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} -v init $@
}
function init-role-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
init-command
else
init-command --cacert ${CERTIFICATE_FILE}
fi
}
function init-database-command() {
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} -v init $@
}
function init-database-repo() {
if [ -z ${CERTIFICATE_FILE} ];
then
init-database-command
else
init-database-command --cacert ${CERTIFICATE_FILE}
fi
}
function drop-create-db() {
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password -c "DROP DATABASE \"${POSTGRES_DB}\";"
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password -c "CREATE DATABASE \"${POSTGRES_DB}\";"
}
function create-pg-pass() {
local pg_host=${POSTGRES_HOST:-localhost}
echo "${pg_host}:${POSTGRES_DB}:${POSTGRES_USER}:${POSTGRES_PASSWORD}" > /root/.pgpass
echo "${POSTGRES_HOST}:template1:${POSTGRES_USER}:${POSTGRES_PASSWORD}" >> /root/.pgpass
chmod 0600 /root/.pgpass
}
function roles-unlock-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} unlock --cleanup-cache $@
}
function roles-forget-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}
function backup-roles() {
local role_prefix="$1"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
roles-unlock-command
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin
roles-forget-command
else
roles-unlock-command --cacert ${CERTIFICATE_FILE}
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin --cacert ${CERTIFICATE_FILE}
roles-forget-command --cacert ${CERTIFICATE_FILE}
fi
}
function db-unlock-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} unlock --cleanup-cache $@
}
function db-forget-command() {
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
}
function backup-db-dump() {
if [ -z ${CERTIFICATE_FILE} ];
then
db-unlock-command
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin
db-forget-command
else
db-unlock-command --cacert ${CERTIFICATE_FILE}
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin --cacert ${CERTIFICATE_FILE}
db-forget-command --cacert ${CERTIFICATE_FILE}
fi
}
function restore-roles() {
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
roles-unlock-command
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin | \
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
else
roles-unlock-command --cacert ${CERTIFICATE_FILE}
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
fi
}
function restore-db() {
local snapshot_id="${1:-latest}"; shift
if [ -z ${CERTIFICATE_FILE} ];
then
db-unlock-command
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin | \
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
else
db-unlock-command --cacert ${CERTIFICATE_FILE}
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
--no-password
fi
}
function list-snapshot-roles() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}
function list-snapshot-db() {
if [ -z ${CERTIFICATE_FILE} ];
then
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots
else
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots --cacert ${CERTIFICATE_FILE}
fi
}


@ -1,51 +0,0 @@
#! /usr/bin/env bb

(ns restic-management
  (:require
   [clojure.spec.alpha :as s]
   [clojure.java.io :as io]
   [clojure.edn :as edn]))

(s/def ::state string?)

(s/def ::backup-repository-state
  (s/keys :req-un [::state]))

(def state {:state ""})

(defn store-backup-repository-state [s]
  (spit "backup-repository-state.edn" s))

(defn read-backup-repository-state []
  (try
    (with-open [r (io/reader "backup-repository-state.edn")]
      (edn/read (java.io.PushbackReader. r)))
    (catch java.io.IOException e
      (printf "Couldn't open '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))
    (catch RuntimeException e
      (printf "Error parsing edn file '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))))

(defn read-secret [s]
  (slurp (str "/var/run/secrets/" s)))
;"/var/run/secrets/rotation-credential-secret/rotation-credential"))

;(println (read-backup-repository-state))
;(println (:state (read-backup-repository-state)))
;(println (s/valid? ::backup-repository-state (read-backup-repository-state)))

(println (read-secret "rotation-credential-secret/rotation-credential"))
(println (read-secret "backup-secrets/restic-password"))

(s/def ::new-password string?)
(s/def ::old-password string?)
(s/def ::password-state
  (s/keys :req-un [::new-password ::old-password]))

(defn rotate []
  (let [state {:new-password (read-secret "rotation-credential-secret/rotation-credential")
               :old-password (read-secret "backup-secrets/restic-password")}]
    (store-backup-repository-state (prn-str state))))

(rotate)


@ -1,7 +0,0 @@
FROM dda-backup:latest
# install it
RUN apt update && apt install -qqy openjdk-17-jre-headless
ADD resources /tmp/
RUN rm -rf /root/.m2
RUN /tmp/install-test.bb


@ -1,4 +0,0 @@
{:deps {org.clojure/spec.alpha {:mvn/version "0.4.233"}
orchestra/orchestra {:mvn/version "2021.01.01-1"}
org.domaindrivenarchitecture/dda-backup {:mvn/version "0.1.1-SNAPSHOT"}}}


@ -1,32 +0,0 @@
#!/usr/bin/env bb

(require '[babashka.tasks :as tasks])

(defn curl-and-check!
  [filename artifact-url sha256-url]
  (let [filepath (str "/tmp/" filename)]
    (tasks/shell "curl" "-SsLo" filepath artifact-url)
    (tasks/shell "curl" "-SsLo" "/tmp/checksum" sha256-url)
    (tasks/shell "bash" "-c" (str "echo \" " filepath "\"|tee -a /tmp/checksum"))
    ;(tasks/shell "sha256sum" "-c" "--status" "/tmp/checksum")
    ))

(defn tar-install!
  [filename binname]
  (let [filepath (str "/tmp/" filename)]
    (tasks/shell "tar" "-C" "/tmp" "-xzf" filepath)
    (tasks/shell "install" "-m" "0700" "-o" "root" "-g" "root" (str "/tmp/" binname) "/usr/local/bin/")))

(defn install!
  [filename]
  (tasks/shell "install" "-m" "0700" "-o" "root" "-g" "root" (str "/tmp/" filename) "/usr/local/bin/"))

(tasks/shell "bb" "/tmp/test.bb")

(curl-and-check!
 "provs-syspec.jar"
 "https://repo.prod.meissa.de/attachments/0a1da41e-aa5b-4a3e-a3b1-215cf2d5b021"
 "https://repo.prod.meissa.de/attachments/f227cf65-cb0f-46a7-a6cd-28f46917412a")
(install! "provs-syspec.jar")

(tasks/shell "apt" "update")
(tasks/shell "apt" "install" "-qqy" "openjdk-17-jre-headless")
(tasks/shell "java" "-jar" "/usr/local/bin/provs-syspec.jar" "local" "-c" "/tmp/spec.yml")


@ -1,7 +0,0 @@
package:
- name: "restic"
command:
- command: "bb -h"
- command: "/tmp/test.bb"


@ -1,27 +0,0 @@
#!/usr/bin/env bb

(require '[babashka.tasks :as tasks]
         '[dda.backup.management :as mgm])

(defn restic-repo-init!
  []
  (spit "restic-pwd" "ThePassword")
  (mgm/init! {:password-file "restic-pwd"
              :restic-repository "restic-repo"}))

(defn restic-backup!
  []
  (tasks/shell "mkdir" "test-backup")
  (spit "test-backup/file" "I was here")
  (tasks/shell "restic" "backup" "--password-file" "restic-pwd" "--repo" "restic-repo" "test-backup"))

(defn restic-restore!
  []
  (tasks/shell "mkdir" "test-restore")
  (tasks/shell "restic" "restore" "--password-file" "restic-pwd" "--repo" "restic-repo" "--target" "test-restore" "latest"))

(restic-repo-init!)
(restic-backup!)
(restic-restore!)


@ -1,56 +0,0 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

name = "ddadevops"
MODULE = "clj-cljs"
PROJECT_ROOT_PATH = "../.."
version = "4.13.2-dev"


@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.0.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()


@task
def image(project):
    build = get_devops_build(project)
    build.image()


@task
def drun(project):
    build = get_devops_build(project)
    build.drun()


@task
def test(project):
    build = get_devops_build(project)
    build.test()


@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()


@ -1,4 +0,0 @@
FROM node:lts-bookworm-slim
ADD resources /tmp
RUN /tmp/install.sh


@ -1,45 +0,0 @@
#!/bin/bash
set -exo pipefail
function main() {
{
upgradeSystem
mkdir -p /usr/share/man/man1
apt-get -qqy install curl openjdk-17-jre-headless leiningen
# shadow-cljs
npm install -g npm
npm install -g --save-dev shadow-cljs
# download kubeconform & graalvm
kubeconform_version="0.6.4"
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
# checksum kubeconform
checksum
# install kubeconform
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
#install pyb
apt-get -qqy install python3 python3-pip git
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
#check
lein --help
cleanupDocker
} > /dev/null
}
function checksum() {
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
cat /tmp/kubeconform-checksum
sha256sum -c --status /tmp/kubeconform-checksum
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main


@ -1,6 +0,0 @@
FROM debian:stable-slim
ADD resources /tmp
RUN /tmp/install.sh
ENV LANG=en_US.UTF-8 \
JAVA_HOME=/usr/lib/jvm/graalvm


@ -1,57 +0,0 @@
#!/bin/bash
set -exo pipefail
function main() {
{
upgradeSystem
apt-get -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev
# download kubeconform & graalvm
kubeconform_version="0.6.4"
graalvm_jdk_version="21.0.2"
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
curl -SsLo /tmp/graalvm-community-jdk.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz
curl -SsLo /tmp/graalvm-checksum https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz.sha256
# checksum kubeconform & graalvm-jdk
checksum
# install kubeconform
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
# install graalvm
tar -C /usr/lib/jvm/ -xf /tmp/graalvm-community-jdk.tar.gz
dirname_graalvm=$(ls /usr/lib/jvm/|grep -e graa)
ln -s /usr/lib/jvm/$dirname_graalvm /usr/lib/jvm/graalvm
ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
#install pyb
apt-get -qqy install python3 python3-pip
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
#check
native-image --version
lein -v
cleanupDocker
} > /dev/null
}
function checksum() {
#kubeconform
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
sha256sum -c --status /tmp/kubeconform-checksum
#graalvm
echo " /tmp/graalvm-community-jdk.tar.gz"|tee -a /tmp/graalvm-checksum
sha256sum -c --status /tmp/graalvm-checksum
}
source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main


@ -1,18 +1,14 @@
 from os import environ
-from datetime import datetime
 from pybuilder.core import task, init
 from ddadevops import *
 
-name = "ddadevops"
-MODULE = "clj"
+name = "clojure"
+MODULE = "image"
 PROJECT_ROOT_PATH = "../.."
-version = "4.13.2-dev"
 
 @init
 def initialize(project):
-    image_tag = version
-    if "dev" in image_tag:
-        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
 
     input = {
         "name": name,
@ -21,8 +17,6 @@ def initialize(project):
         "project_root_path": PROJECT_ROOT_PATH,
         "build_types": ["IMAGE"],
         "mixin_types": [],
-        "image_naming": "NAME_AND_MODULE",
-        "image_tag": f"{image_tag}",
     }
 
     project.build_depends_on("ddadevops>=4.0.0")


@ -1,4 +1,4 @@
-FROM python:3.10-alpine
+FROM node:lts-buster-slim
 
 ADD resources /tmp
 RUN /tmp/install.sh


@ -0,0 +1,2 @@
d7a5cb848b783c15119316d716d8a74bf11c9e3ab050f3adf28e0678a6018467 kubeconform-v0.4.7.tar.gz
bbd3e03025168172a76c2a29e6a14c1c37e3476b30774259c3ef5952fb86f470 graalvm-ce-java11-linux-amd64-21.2.0.tar.gz


@ -0,0 +1,43 @@
#!/bin/bash
set -eux

function main() {
  upgradeSystem
  mkdir -p /usr/share/man/man1
  apt -qqy install openjdk-11-jre-headless leiningen curl build-essential libz-dev zlib1g-dev

  # shadow-cljs
  npm install -g --save-dev shadow-cljs

  # download kubeconform & graalvm
  curl -Lo /tmp/kubeconform-v0.4.7.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.4.7/kubeconform-linux-amd64.tar.gz
  curl -Lo /tmp/graalvm-ce-java11-linux-amd64-21.2.0.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-21.2.0/graalvm-ce-java11-linux-amd64-21.2.0.tar.gz

  # checksum
  cd /tmp
  sha256sum --check CHECKSUMS

  # install kubeconform
  tar -xf /tmp/kubeconform-v0.4.7.tar.gz
  cp kubeconform /usr/local/bin

  # install graalvm
  tar -xzf graalvm-ce-java11-linux-amd64-21.2.0.tar.gz
  mv graalvm-ce-java11-21.2.0 /usr/lib/jvm/
  ln -s /usr/lib/jvm/graalvm-ce-java11-21.2.0 /usr/lib/jvm/graalvm
  ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
  update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
  gu install native-image
  ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin

  # install lein
  /tmp/lein.sh

  cleanupDocker
}

source /tmp/install_functions.sh
main

View file

@ -0,0 +1,423 @@
#!/usr/bin/env bash
# Ensure this file is executable via `chmod a+x lein`, then place it
# somewhere on your $PATH, like ~/bin. The rest of Leiningen will be
# installed upon first run into the ~/.lein/self-installs directory.
function msg {
echo "$@" 1>&2
}
export LEIN_VERSION="2.9.6"
# Must be sha256sum, will be replaced by bin/release
export LEIN_CHECKSUM='41c543f73eec4327dc20e60d5d820fc2a9dc772bc671610b9c385d9c4f5970b8'
case $LEIN_VERSION in
*SNAPSHOT) SNAPSHOT="YES" ;;
*) SNAPSHOT="NO" ;;
esac
if [[ "$CLASSPATH" != "" ]]; then
cat <<-'EOS' 1>&2
WARNING: You have $CLASSPATH set, probably by accident.
It is strongly recommended to unset this before proceeding.
EOS
fi
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]]; then
delimiter=";"
else
delimiter=":"
fi
if [[ "$OSTYPE" == "cygwin" ]]; then
cygwin=true
else
cygwin=false
fi
function command_not_found {
msg "Leiningen couldn't find $1 in your \$PATH ($PATH), which is required."
exit 1
}
function make_native_path {
# ensure we have native paths
if $cygwin && [[ "$1" == /* ]]; then
echo -n "$(cygpath -wp "$1")"
elif [[ "$OSTYPE" == "msys" && "$1" == /?/* ]]; then
echo -n "$(sh -c "(cd $1 2</dev/null && pwd -W) || echo $1 | sed 's/^\\/\([a-z]\)/\\1:/g'")"
else
echo -n "$1"
fi
}
# usage : add_path PATH_VAR [PATH]...
function add_path {
local path_var="$1"
shift
while [ -n "$1" ];do
# http://bashify.com/?Useful_Techniques:Indirect_Variables:Indirect_Assignment
if [[ -z ${!path_var} ]]; then
export ${path_var}="$(make_native_path "$1")"
else
export ${path_var}="${!path_var}${delimiter}$(make_native_path "$1")"
fi
shift
done
}
function download_failed_message {
cat <<-EOS 1>&2
Failed to download $1 (exit code $2)
It's possible your HTTP client's certificate store does not have the
correct certificate authority needed. This is often caused by an
out-of-date version of libssl. It's also possible that you're behind a
firewall and haven't set HTTP_PROXY and HTTPS_PROXY.
EOS
}
function checksum_failed_message {
cat <<-EOS 1>&2
Failed to properly download $1
The checksum was mismatched. and we could not verify the downloaded
file. We expected a sha256 of
$2 and actually had
$3.
We used '$SHASUM_CMD' to verify the downloaded file.
EOS
}
function self_install {
if [ -r "$LEIN_JAR" ]; then
cat <<-EOS 1>&2
The self-install jar already exists at $LEIN_JAR.
If you wish to re-download, delete it and rerun "$0 self-install".
EOS
exit 1
fi
msg "Downloading Leiningen to $LEIN_JAR now..."
mkdir -p "$(dirname "$LEIN_JAR")"
LEIN_URL="https://github.com/technomancy/leiningen/releases/download/$LEIN_VERSION/leiningen-$LEIN_VERSION-standalone.zip"
$HTTP_CLIENT "$LEIN_JAR.pending" "$LEIN_URL"
local exit_code=$?
if [ $exit_code == 0 ]; then
printf "$LEIN_CHECKSUM $LEIN_JAR.pending\n" > "$LEIN_JAR.pending.shasum"
$SHASUM_CMD -c "$LEIN_JAR.pending.shasum"
if [ $? == 0 ]; then
mv -f "$LEIN_JAR.pending" "$LEIN_JAR"
else
got_sum="$($SHASUM_CMD "$LEIN_JAR.pending" | cut -f 1 -d ' ')"
checksum_failed_message "$LEIN_URL" "$LEIN_CHECKSUM" "$got_sum"
rm "$LEIN_JAR.pending" 2> /dev/null
exit 1
fi
else
rm "$LEIN_JAR.pending" 2> /dev/null
download_failed_message "$LEIN_URL" "$exit_code"
exit 1
fi
}
NOT_FOUND=1
ORIGINAL_PWD="$PWD"
while [ ! -r "$PWD/project.clj" ] && [ "$PWD" != "/" ] && [ $NOT_FOUND -ne 0 ]
do
cd ..
if [ "$(dirname "$PWD")" = "/" ]; then
NOT_FOUND=0
cd "$ORIGINAL_PWD"
fi
done
export LEIN_HOME="${LEIN_HOME:-"$HOME/.lein"}"
for f in "/etc/leinrc" "$LEIN_HOME/leinrc" ".leinrc"; do
if [ -e "$f" ]; then
source "$f"
fi
done
if $cygwin; then
export LEIN_HOME=$(cygpath -w "$LEIN_HOME")
fi
LEIN_JAR="$LEIN_HOME/self-installs/leiningen-$LEIN_VERSION-standalone.jar"
# normalize $0 on certain BSDs
if [ "$(dirname "$0")" = "." ]; then
SCRIPT="$(which "$(basename "$0")")"
if [ -z "$SCRIPT" ]; then
SCRIPT="$0"
fi
else
SCRIPT="$0"
fi
# resolve symlinks to the script itself portably
while [ -h "$SCRIPT" ] ; do
ls=$(ls -ld "$SCRIPT")
link=$(expr "$ls" : '.*-> \(.*\)$')
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT="$(dirname "$SCRIPT"$)/$link"
fi
done
BIN_DIR="$(dirname "$SCRIPT")"
export LEIN_JVM_OPTS="${LEIN_JVM_OPTS-"-Xverify:none -XX:+TieredCompilation -XX:TieredStopAtLevel=1"}"
# This needs to be defined before we call HTTP_CLIENT below
if [ "$HTTP_CLIENT" = "" ]; then
if type -p curl >/dev/null 2>&1; then
if [ "$https_proxy" != "" ]; then
CURL_PROXY="-x $https_proxy"
fi
HTTP_CLIENT="curl $CURL_PROXY -f -L -o"
else
HTTP_CLIENT="wget -O"
fi
fi
# This needs to be defined before we call SHASUM_CMD below
if [ "$SHASUM_CMD" = "" ]; then
if type -p sha256sum >/dev/null 2>&1; then
export SHASUM_CMD="sha256sum"
elif type -p shasum >/dev/null 2>&1; then
export SHASUM_CMD="shasum --algorithm 256"
elif type -p sha256 >/dev/null 2>&1; then
export SHASUM_CMD="sha256 -q"
else
command_not_found sha256sum
fi
fi
# When :eval-in :classloader we need more memory
grep -E -q '^\s*:eval-in\s+:classloader\s*$' project.clj 2> /dev/null && \
export LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Xms64m -Xmx512m"
if [ -r "$BIN_DIR/../src/leiningen/version.clj" ]; then
# Running from source checkout
LEIN_DIR="$(cd $(dirname "$BIN_DIR");pwd -P)"
# Need to use lein release to bootstrap the leiningen-core library (for aether)
if [ ! -r "$LEIN_DIR/leiningen-core/.lein-bootstrap" ]; then
cat <<-'EOS' 1>&2
Leiningen is missing its dependencies.
Please run "lein bootstrap" in the leiningen-core/ directory
with a stable release of Leiningen. See CONTRIBUTING.md for details.
EOS
exit 1
fi
# If project.clj for lein or leiningen-core changes, we must recalculate
LAST_PROJECT_CHECKSUM=$(cat "$LEIN_DIR/.lein-project-checksum" 2> /dev/null)
PROJECT_CHECKSUM=$(sum "$LEIN_DIR/project.clj" "$LEIN_DIR/leiningen-core/project.clj")
if [ "$PROJECT_CHECKSUM" != "$LAST_PROJECT_CHECKSUM" ]; then
if [ -r "$LEIN_DIR/.lein-classpath" ]; then
rm "$LEIN_DIR/.lein-classpath"
fi
fi
# Use bin/lein to calculate its own classpath.
if [ ! -r "$LEIN_DIR/.lein-classpath" ] && [ "$1" != "classpath" ]; then
msg "Recalculating Leiningen's classpath."
cd "$LEIN_DIR"
LEIN_NO_USER_PROFILES=1 "$LEIN_DIR/bin/lein" classpath .lein-classpath
sum "$LEIN_DIR/project.clj" "$LEIN_DIR/leiningen-core/project.clj" > \
.lein-project-checksum
cd -
fi
mkdir -p "$LEIN_DIR/target/classes"
export LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Dclojure.compile.path=$LEIN_DIR/target/classes"
add_path CLASSPATH "$LEIN_DIR/leiningen-core/src/" "$LEIN_DIR/leiningen-core/resources/" \
"$LEIN_DIR/test:$LEIN_DIR/target/classes" "$LEIN_DIR/src" ":$LEIN_DIR/resources"
if [ -r "$LEIN_DIR/.lein-classpath" ]; then
add_path CLASSPATH "$(cat "$LEIN_DIR/.lein-classpath" 2> /dev/null)"
else
add_path CLASSPATH "$(cat "$LEIN_DIR/leiningen-core/.lein-bootstrap" 2> /dev/null)"
fi
else # Not running from a checkout
add_path CLASSPATH "$LEIN_JAR"
if [ "$LEIN_USE_BOOTCLASSPATH" != "no" ]; then
LEIN_JVM_OPTS="-Xbootclasspath/a:$LEIN_JAR $LEIN_JVM_OPTS"
fi
if [ ! -r "$LEIN_JAR" -a "$1" != "self-install" ]; then
self_install
fi
fi
if [ ! -x "$JAVA_CMD" ] && ! type -f java >/dev/null
then
msg "Leiningen couldn't find 'java' executable, which is required."
msg "Please either set JAVA_CMD or put java (>=1.6) in your \$PATH ($PATH)."
exit 1
fi
export LEIN_JAVA_CMD="${LEIN_JAVA_CMD:-${JAVA_CMD:-java}}"
if [[ -z "${DRIP_INIT+x}" && "$(basename "$LEIN_JAVA_CMD")" == *drip* ]]; then
export DRIP_INIT="$(printf -- '-e\n(require (quote leiningen.repl))')"
export DRIP_INIT_CLASS="clojure.main"
fi
# Support $JAVA_OPTS for backwards-compatibility.
export JVM_OPTS="${JVM_OPTS:-"$JAVA_OPTS"}"
# Handle jline issue with cygwin not propagating OSTYPE through java subprocesses: https://github.com/jline/jline2/issues/62
cygterm=false
if $cygwin; then
case "$TERM" in
rxvt* | xterm* | vt*) cygterm=true ;;
esac
fi
if $cygterm; then
LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Djline.terminal=jline.UnixTerminal"
stty -icanon min 1 -echo > /dev/null 2>&1
fi
# TODO: investigate http://skife.org/java/unix/2011/06/20/really_executable_jars.html
# If you're packaging this for a package manager (.deb, homebrew, etc)
# you need to remove the self-install and upgrade functionality or see lein-pkg.
if [ "$1" = "self-install" ]; then
if [ -r "$BIN_DIR/../src/leiningen/version.clj" ]; then
cat <<-'EOS' 1>&2
Running self-install from a checkout is not supported.
See CONTRIBUTING.md for SNAPSHOT-specific build instructions.
EOS
exit 1
fi
msg "Manual self-install is deprecated; it will run automatically when necessary."
self_install
elif [ "$1" = "upgrade" ] || [ "$1" = "downgrade" ]; then
if [ "$LEIN_DIR" != "" ]; then
msg "The upgrade task is not meant to be run from a checkout."
exit 1
fi
if [ $SNAPSHOT = "YES" ]; then
cat <<-'EOS' 1>&2
The upgrade task is only meant for stable releases.
See the "Bootstrapping" section of CONTRIBUTING.md.
EOS
exit 1
fi
if [ ! -w "$SCRIPT" ]; then
msg "You do not have permission to upgrade the installation in $SCRIPT"
exit 1
else
TARGET_VERSION="${2:-stable}"
echo "The script at $SCRIPT will be upgraded to the latest $TARGET_VERSION version."
echo -n "Do you want to continue [Y/n]? "
read RESP
case "$RESP" in
y|Y|"")
echo
msg "Upgrading..."
TARGET="/tmp/lein-${$}-upgrade"
if $cygwin; then
TARGET=$(cygpath -w "$TARGET")
fi
LEIN_SCRIPT_URL="https://github.com/technomancy/leiningen/raw/$TARGET_VERSION/bin/lein"
$HTTP_CLIENT "$TARGET" "$LEIN_SCRIPT_URL"
if [ $? == 0 ]; then
cmp -s "$TARGET" "$SCRIPT"
if [ $? == 0 ]; then
msg "Leiningen is already up-to-date."
fi
mv "$TARGET" "$SCRIPT" && chmod +x "$SCRIPT"
unset CLASSPATH
exec "$SCRIPT" version
else
download_failed_message "$LEIN_SCRIPT_URL"
fi;;
*)
msg "Aborted."
exit 1;;
esac
fi
else
if $cygwin; then
# When running on Cygwin, use Windows-style paths for java
ORIGINAL_PWD=$(cygpath -w "$ORIGINAL_PWD")
fi
# apply context specific CLASSPATH entries
if [ -f .lein-classpath ]; then
add_path CLASSPATH "$(cat .lein-classpath)"
fi
if [ -n "$DEBUG" ]; then
msg "Leiningen's classpath: $CLASSPATH"
fi
if [ -r .lein-fast-trampoline ]; then
export LEIN_FAST_TRAMPOLINE='y'
fi
if [ "$LEIN_FAST_TRAMPOLINE" != "" ] && [ -r project.clj ]; then
INPUTS="$* $(cat project.clj) $LEIN_VERSION $(test -f "$LEIN_HOME/profiles.clj" && cat "$LEIN_HOME/profiles.clj") $(test -f profiles.clj && cat profiles.clj)"
INPUT_CHECKSUM=$(echo "$INPUTS" | $SHASUM_CMD | cut -f 1 -d " ")
# Just don't change :target-path in project.clj, mkay?
TRAMPOLINE_FILE="target/trampolines/$INPUT_CHECKSUM"
else
if hash mktemp 2>/dev/null; then
# Check if mktemp is available before using it
TRAMPOLINE_FILE="$(mktemp /tmp/lein-trampoline-XXXXXXXXXXXXX)"
else
TRAMPOLINE_FILE="/tmp/lein-trampoline-$$"
fi
trap 'rm -f $TRAMPOLINE_FILE' EXIT
fi
if $cygwin; then
TRAMPOLINE_FILE=$(cygpath -w "$TRAMPOLINE_FILE")
fi
if [ "$INPUT_CHECKSUM" != "" ] && [ -r "$TRAMPOLINE_FILE" ]; then
if [ -n "$DEBUG" ]; then
msg "Fast trampoline with $TRAMPOLINE_FILE."
fi
exec sh -c "exec $(cat "$TRAMPOLINE_FILE")"
else
export TRAMPOLINE_FILE
"$LEIN_JAVA_CMD" \
-Dfile.encoding=UTF-8 \
-Dmaven.wagon.http.ssl.easy=false \
-Dmaven.wagon.rto=10000 \
$LEIN_JVM_OPTS \
-Dleiningen.input-checksum="$INPUT_CHECKSUM" \
-Dleiningen.original.pwd="$ORIGINAL_PWD" \
-Dleiningen.script="$SCRIPT" \
-classpath "$CLASSPATH" \
clojure.main -m leiningen.core.main "$@"
EXIT_CODE=$?
if $cygterm ; then
stty icanon echo > /dev/null 2>&1
fi
if [ -r "$TRAMPOLINE_FILE" ] && [ "$LEIN_TRAMPOLINE_WARMUP" = "" ]; then
TRAMPOLINE="$(cat "$TRAMPOLINE_FILE")"
if [ "$INPUT_CHECKSUM" = "" ]; then # not using fast trampoline
rm "$TRAMPOLINE_FILE"
fi
if [ "$TRAMPOLINE" = "" ]; then
exit $EXIT_CODE
else
exec sh -c "exec $TRAMPOLINE"
fi
else
exit $EXIT_CODE
fi
fi
fi

View file

@ -0,0 +1,11 @@
FROM clojure
RUN apt update
RUN apt -yqq --no-install-recommends --yes install curl default-jre-headless
RUN curl -L -o /tmp/serverspec.jar \
https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/2.0.0/dda-serverspec-standalone.jar
COPY serverspec.edn /tmp/serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v

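The test image above bakes the dda-serverspec jar into the build and fails the docker build if the verification run exits non-zero. A sketch of driving the same jar from a Python test harness instead; the jar and config paths are taken from the Dockerfile, the wrapper itself is hypothetical:

import subprocess

def run_serverspec(jar: str = "/tmp/serverspec.jar",
                   config: str = "/tmp/serverspec.edn") -> None:
    # -v mirrors the verbose flag used in the Dockerfile's RUN step.
    result = subprocess.run(
        ["java", "-jar", jar, config, "-v"],
        capture_output=True, text=True, check=False,
    )
    if result.returncode != 0:
        raise RuntimeError(f"serverspec verification failed:\n{result.stderr}")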
View file

@ -0,0 +1 @@
{}

View file

@ -1,57 +0,0 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

name = "ddadevops"
MODULE = "ddadevops"
PROJECT_ROOT_PATH = "../.."
version = "4.13.2-dev"

@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_ONLY",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.9.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()

@task
def image(project):
    build = get_devops_build(project)
    build.image()

@task
def drun(project):
    build = get_devops_build(project)
    build.drun()

@task
def test(project):
    build = get_devops_build(project)
    build.test()

@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()

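The removed build file computes a unique registry tag for dev builds by appending a timestamp to the version, so repeated CI runs never overwrite each other. That logic in isolation, assuming the version-string format used above:

from datetime import datetime

def compute_image_tag(version: str) -> str:
    # Release versions pass through unchanged; dev versions get a timestamp suffix.
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    return image_tag

# compute_image_tag("4.13.2-dev") -> e.g. "4.13.2-dev2023-07-12-13-36-23"
# compute_image_tag("4.13.2")     -> "4.13.2"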
View file

@ -1,5 +0,0 @@
FROM python:3.10-alpine
ADD resources /tmp
RUN /tmp/install.sh

View file

@ -1,19 +0,0 @@
#!/bin/sh
set -exo pipefail

function main() {
  {
    upgradeSystem
    apk add --no-cache python3 py3-pip openssl-dev bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection
    cleanupDocker
  } > /dev/null
}

source /tmp/install_functions_alpine.sh
main

View file

@ -1,19 +1,14 @@
 from os import environ
-from datetime import datetime
 from pybuilder.core import task, init
 from ddadevops import *

-name = "ddadevops"
-MODULE = "python"
+name = "devops-build"
+MODULE = "image"
 PROJECT_ROOT_PATH = "../.."
-version = "4.13.2-dev"

 @init
 def initialize(project):
-    image_tag = version
-    if "dev" in image_tag:
-        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
     input = {
         "name": name,
@ -22,8 +17,6 @@ def initialize(project):
         "project_root_path": PROJECT_ROOT_PATH,
         "build_types": ["IMAGE"],
         "mixin_types": [],
-        "image_naming": "NAME_AND_MODULE",
-        "image_tag": f"{image_tag}",
     }
     project.build_depends_on("ddadevops>=4.0.0")

View file

@ -0,0 +1,6 @@
FROM docker:latest
RUN set -eux;
RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git;
RUN python3 -m pip install -U pip;
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml;

View file

@ -0,0 +1,11 @@
FROM devops-build
RUN apk update
RUN apk add curl openjdk8
RUN curl -L -o /tmp/serverspec.jar \
https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/2.0.0/dda-serverspec-standalone.jar
COPY serverspec.edn /tmp/serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v

View file

@ -0,0 +1 @@
{}

View file

@ -1,57 +0,0 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

name = "ddadevops"
MODULE = "dind"
PROJECT_ROOT_PATH = "../.."
version = "4.13.2-dev"

@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.7.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()

@task
def image(project):
    build = get_devops_build(project)
    build.image()

@task
def drun(project):
    build = get_devops_build(project)
    build.drun()

@task
def test(project):
    build = get_devops_build(project)
    build.test()

@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()

View file

@ -1,5 +0,0 @@
FROM docker:latest
WORKDIR /tmp
ADD resources ./
RUN ./install.sh

View file

@ -1,17 +0,0 @@
#!/bin/sh
set -exo pipefail

function main() {
  {
    upgradeSystem
    apk add --no-cache python3 py3-pip openssl-dev bash git
    pip3 install --break-system-packages pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection
    cleanupDocker
  } > /dev/null
}

source /tmp/install_functions_alpine.sh
main

View file

@ -1,57 +0,0 @@
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

name = "ddadevops"
MODULE = "kotlin"
PROJECT_ROOT_PATH = "../.."
version = "4.13.2-dev"

@init
def initialize(project):
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    input = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.0.0")

    build = DevopsImageBuild(project, input)
    build.initialize_build_dir()

@task
def image(project):
    build = get_devops_build(project)
    build.image()

@task
def drun(project):
    build = get_devops_build(project)
    build.drun()

@task
def test(project):
    build = get_devops_build(project)
    build.test()

@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()

View file

@ -1,4 +0,0 @@
FROM debian:stable-slim
ADD resources /tmp
RUN /tmp/install.sh

View file

@ -1,17 +0,0 @@
#!/bin/bash
set -exo pipefail

function main() {
  {
    upgradeSystem
    apt-get -qqy install curl git kotlin gradle iputils-ping ssh python3 python3-pip
    pip3 install --break-system-packages pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection
    cleanupDocker
  } > /dev/null
}

source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main

View file

@ -1,20 +0,0 @@
#!/bin/sh
set -exo pipefail

function main() {
  {
    upgradeSystem
    apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection \
      coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML
    cleanupDocker
  } > /dev/null
}

source /tmp/install_functions_alpine.sh
main

View file

@ -4,9 +4,7 @@ from ..infrastructure import FileApi, ResourceApi, ImageApi
 class ImageBuildService:
-    def __init__(
-        self, file_api: FileApi, resource_api: ResourceApi, image_api: ImageApi
-    ):
+    def __init__(self, file_api: FileApi, resource_api: ResourceApi, image_api: ImageApi):
         self.file_api = file_api
         self.resource_api = resource_api
         self.image_api = image_api
@ -20,9 +18,7 @@ class ImageBuildService:
         )

     def __copy_build_resource_file_from_package__(self, resource_name, devops: Devops):
-        data = self.resource_api.read_resource(
-            f"src/main/resources/docker/{resource_name}"
-        )
+        data = self.resource_api.read_resource(f"src/main/resources/docker/{resource_name}")
         self.file_api.write_data_to_file(
             Path(f"{devops.build_path()}/{resource_name}"), data
         )
@ -31,16 +27,12 @@ class ImageBuildService:
         self.__copy_build_resource_file_from_package__(
             "image/resources/install_functions.sh", devops
         )
-        self.__copy_build_resource_file_from_package__(
-            "image/resources/install_functions_debian.sh", devops
-        )
-        self.__copy_build_resource_file_from_package__(
-            "image/resources/install_functions_alpine.sh", devops
-        )

     def __copy_build_resources_from_dir__(self, devops: Devops):
         image = devops.specialized_builds[BuildType.IMAGE]
-        self.file_api.cp_force(image.build_commons_path(), devops.build_path())
+        self.file_api.cp_force(
+            image.build_commons_path(), devops.build_path()
+        )

     def initialize_build_dir(self, devops: Devops):
         image = devops.specialized_builds[BuildType.IMAGE]
@ -51,18 +43,13 @@ class ImageBuildService:
         else:
             self.__copy_build_resources_from_dir__(devops)
         self.file_api.cp_recursive("image", build_path)
-        try:
-            self.file_api.cp_recursive("test", build_path)
-        except:
-            print("Folder 'test' not found")
+        self.file_api.cp_recursive("test", build_path)

     def image(self, devops: Devops):
-        image = devops.specialized_builds[BuildType.IMAGE]
-        self.image_api.image(image.image_name(), devops.build_path())
+        self.image_api.image(devops.name, devops.build_path())

     def drun(self, devops: Devops):
-        image = devops.specialized_builds[BuildType.IMAGE]
-        self.image_api.drun(image.image_name())
+        self.image_api.drun(devops.name)

     def dockerhub_login(self, devops: Devops):
         image = devops.specialized_builds[BuildType.IMAGE]
@ -72,14 +59,9 @@ class ImageBuildService:

     def dockerhub_publish(self, devops: Devops):
         image = devops.specialized_builds[BuildType.IMAGE]
-        if image.image_tag is not None:
-            self.image_api.dockerhub_publish(
-                image.image_name(), image.image_dockerhub_user, image.image_tag
-            )
         self.image_api.dockerhub_publish(
-            image.image_name(), image.image_dockerhub_user, 'latest'
+            devops.name, image.image_dockerhub_user, image.image_tag
         )

     def test(self, devops: Devops):
-        image = devops.specialized_builds[BuildType.IMAGE]
-        self.image_api.test(image.image_name(), devops.build_path())
+        self.image_api.test(devops.name, devops.build_path())

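Both sides of the dockerhub_publish change end up pushing a versioned tag plus latest; they differ in where the fan-out lives (the removed side in the service layer, the minimal side inside ImageApi). The combined effect as a sketch, assuming a generic command runner in place of ExecutionApi:

def dockerhub_publish(run, name: str, user: str, tag=None):
    # Push the explicit tag first (when configured), then refresh 'latest'.
    if tag is not None:
        run(f"docker tag {name} {user}/{name}:{tag}")
        run(f"docker push {user}/{name}:{tag}")
    run(f"docker tag {name} {user}/{name}:latest")
    run(f"docker push {user}/{name}:latest")

# Dry run with print as the runner:
# dockerhub_publish(print, "clojure", "domaindrivenarchitecture", "4.0.8")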
View file

@ -1,26 +1,18 @@
-import json
 from typing import List
 from pathlib import Path
-from ..infrastructure import GitApi, ArtifactDeploymentApi, BuildFileRepository
-from ..domain import Version, Release, ReleaseType, Artifact
+from ..infrastructure import GitApi, BuildFileRepository
+from ..domain import Version, Release, ReleaseType

 class ReleaseService:
-    def __init__(
-        self,
-        git_api: GitApi,
-        artifact_deployment_api: ArtifactDeploymentApi,
-        build_file_repository: BuildFileRepository,
-    ):
+    def __init__(self, git_api: GitApi, build_file_repository: BuildFileRepository):
         self.git_api = git_api
-        self.artifact_deployment_api = artifact_deployment_api
         self.build_file_repository = build_file_repository

     @classmethod
     def prod(cls, base_dir: str):
         return cls(
             GitApi(),
-            ArtifactDeploymentApi(),
             BuildFileRepository(base_dir),
         )
@ -53,8 +45,7 @@ class ReleaseService:
         bump_version = release_version.create_bump()
         release_message = f"release: {release_version.to_string()}"
         bump_message = f"bump version to: {bump_version.to_string()}"
-        release_tag = f"{release.release_tag_prefix}{release_version.to_string()}"
-        self.git_api.tag_annotated(release_tag, release_message, 0)
+        self.git_api.tag_annotated(release_version.to_string(), release_message, 0)
         self.__set_version_and_commit__(
             bump_version,
             release.build_files(),
@ -62,41 +53,6 @@
         )
         self.git_api.push_follow_tags()

-    def publish_artifacts(self, release: Release):
-        token = str(release.release_artifact_token)
-        release_id = self.__parse_forgejo_release_id__(
-            self.artifact_deployment_api.create_forgejo_release(
-                release.forgejo_release_api_endpoint(),
-                release.version.to_string(),
-                token,
-            )
-        )
-        artifacts_sums = []
-        for artifact in release.release_artifacts:
-            sha256 = self.artifact_deployment_api.calculate_sha256(artifact.path())
-            sha512 = self.artifact_deployment_api.calculate_sha512(artifact.path())
-            artifacts_sums += [Artifact(sha256), Artifact(sha512)]
-        artifacts = release.release_artifacts + artifacts_sums
-        print(artifacts)
-        for artifact in artifacts:
-            print(str)
-            self.artifact_deployment_api.add_asset_to_release(
-                release.forgejo_release_asset_api_endpoint(release_id),
-                artifact.path(),
-                artifact.type(),
-                token,
-            )
-
-    def __parse_forgejo_release_id__(self, release_response: str) -> int:
-        parsed = json.loads(release_response)
-        try:
-            result = parsed["id"]
-        except:
-            raise RuntimeError(str(parsed))
-        return result

     def __set_version_and_commit__(
         self, version: Version, build_file_ids: List[str], message: str
     ):

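The removed publish_artifacts drives the Forgejo release API through curl: create a release, then attach each artifact plus its checksum files as assets. A rough equivalent using requests, assuming the same endpoints and token handling; requests is not a dependency of this codebase, so treat this purely as a sketch:

import requests

def create_release(base: str, org: str, repo: str, tag: str, token: str) -> int:
    # POST /api/v1/repos/{org}/{repo}/releases returns the release id.
    url = f"{base}/api/v1/repos/{org}/{repo}/releases"
    resp = requests.post(
        url,
        json={"tag_name": tag, "body": f"Provides files for release {tag}"},
        headers={"Authorization": f"token {token}"},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()["id"]

def add_asset(base: str, org: str, repo: str, release_id: int,
              path: str, token: str) -> None:
    # Assets are uploaded as multipart/form-data under the 'attachment' field.
    url = f"{base}/api/v1/repos/{org}/{repo}/releases/{release_id}/assets"
    with open(path, "rb") as f:
        requests.post(
            url,
            files={"attachment": f},
            headers={"Authorization": f"token {token}"},
            timeout=30,
        ).raise_for_status()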
View file

@ -17,7 +17,6 @@ from .provider_hetzner import Hetzner
 from .provider_aws import Aws
 from .provs_k3s import K3s
 from .release import Release
-from .artifact import Artifact
 from .credentials import Credentials, CredentialMapping, GopassType
 from .version import Version
 from .build_file import BuildFileType, BuildFile

View file

@ -1,46 +0,0 @@
from enum import Enum
from pathlib import Path
from .common import (
    Validateable,
)

class ArtifactType(Enum):
    TEXT = 0
    JAR = 1

class Artifact(Validateable):
    def __init__(self, path: str):
        self.path_str = path

    def path(self) -> Path:
        return Path(self.path_str)

    def type(self) -> str:
        suffix = self.path().suffix
        match suffix:
            case ".jar":
                return "application/x-java-archive"
            case ".js":
                return "application/x-javascript"
            case _:
                return "text/plain"

    def validate(self):
        result = []
        result += self.__validate_is_not_empty__("path_str")
        try:
            Path(self.path_str)
        except Exception as e:
            result += [f"path was not a valid: {e}"]
        return result

    def __str__(self):
        return str(self.path())

    def __eq__(self, other):
        return other and self.__str__() == other.__str__()

    def __hash__(self) -> int:
        return self.__str__().__hash__()

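The deleted Artifact class maps file suffixes to the content types used when uploading release assets. Usage, mirroring the (also deleted) tests further below; Artifact here refers to the class just shown:

art = Artifact("target/app.jar")
assert art.type() == "application/x-java-archive"
assert Artifact("target/app.js").type() == "application/x-javascript"
# Checksum files like .sha256 have no special case and fall through to text/plain:
assert Artifact("target/app.jar.sha256").type() == "text/plain"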
View file

@ -11,7 +11,6 @@ class BuildFileType(Enum):
JS = ".json" JS = ".json"
JAVA_GRADLE = ".gradle" JAVA_GRADLE = ".gradle"
JAVA_CLOJURE = ".clj" JAVA_CLOJURE = ".clj"
JAVA_CLOJURE_EDN = ".edn"
PYTHON = ".py" PYTHON = ".py"
@ -42,42 +41,42 @@ class BuildFile(Validateable):
result = BuildFileType.JAVA_CLOJURE result = BuildFileType.JAVA_CLOJURE
case ".py": case ".py":
result = BuildFileType.PYTHON result = BuildFileType.PYTHON
case ".edn":
result = BuildFileType.JAVA_CLOJURE_EDN
case _: case _:
result = None result = None
return result return result
def __get_file_type_regex_str(self, file_type: BuildFileType):
match file_type:
case BuildFileType.JAVA_GRADLE:
return r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
case BuildFileType.PYTHON:
return r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT|-dev\d*)?)\""
case BuildFileType.JAVA_CLOJURE:
return r"(?P<pre_version>\(defproject\s(\S)*\s)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
case BuildFileType.JAVA_CLOJURE_EDN:
return r"(?P<pre_version>\:version\s+)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT)?)\""
case _:
return ""
def get_version(self) -> Version: def get_version(self) -> Version:
try: try:
build_file_type = self.build_file_type() match self.build_file_type():
match build_file_type:
case BuildFileType.JS: case BuildFileType.JS:
version_str = json.loads(self.content)["version"] version_str = json.loads(self.content)["version"]
case ( case BuildFileType.JAVA_GRADLE:
BuildFileType.JAVA_GRADLE # TODO: '\nversion = ' will not parse all ?!
| BuildFileType.PYTHON version_line = re.search("\nversion = .*", self.content)
| BuildFileType.JAVA_CLOJURE version_line_group = version_line.group()
| BuildFileType.JAVA_CLOJURE_EDN version_string = re.search(
): "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
version_str = re.search( )
self.__get_file_type_regex_str(build_file_type), self.content version_str = version_string.group()
).group("version") case BuildFileType.PYTHON:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*",
version_line_group,
)
version_str = version_string.group()
case BuildFileType.JAVA_CLOJURE:
# TODO: unsure about the trailing '\n' !
version_line = re.search("\\(defproject .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
except: except:
raise RuntimeError(f"Version not found in file {self.file_path}") raise Exception(f"Version not found in file {self.file_path}")
result = Version.from_str(version_str, self.get_default_suffix()) result = Version.from_str(version_str, self.get_default_suffix())
result.throw_if_invalid() result.throw_if_invalid()
@ -85,32 +84,38 @@ class BuildFile(Validateable):
return result return result
def set_version(self, new_version: Version): def set_version(self, new_version: Version):
# TODO: How can we create regex-pattern constants to use them at both places?
if new_version.is_snapshot():
new_version.snapshot_suffix = self.get_default_suffix()
try: try:
build_file_type = self.build_file_type() match self.build_file_type():
match build_file_type:
case BuildFileType.JS: case BuildFileType.JS:
json_data = json.loads(self.content) json_data = json.loads(self.content)
json_data["version"] = new_version.to_string() json_data["version"] = new_version.to_string()
self.content = json.dumps(json_data, indent=4) self.content = json.dumps(json_data, indent=4)
case ( case BuildFileType.JAVA_GRADLE:
BuildFileType.JAVA_GRADLE
| BuildFileType.PYTHON
| BuildFileType.JAVA_CLOJURE
| BuildFileType.JAVA_CLOJURE_EDN
):
substitute = re.sub( substitute = re.sub(
self.__get_file_type_regex_str(build_file_type), '\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
rf'\g<pre_version>"{new_version.to_string()}"', f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.PYTHON:
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?(-dev)?[0-9]*"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.JAVA_CLOJURE:
# TODO: we should stick here on defproject instead of first line!
substitute = re.sub(
'"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'"{new_version.to_string()}"',
self.content, self.content,
1, 1,
) )
self.content = substitute self.content = substitute
except: except:
raise RuntimeError(f"Version not found in file {self.file_path}") raise Exception(f"Version not found in file {self.file_path}")
def get_default_suffix(self) -> str: def get_default_suffix(self) -> str:
result = "SNAPSHOT" result = "SNAPSHOT"

View file
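The removed __get_file_type_regex_str patterns capture the version in a named group so that get_version and set_version share one definition per file type. How the Python-file pattern behaves on sample content, as a quick sketch:

import re

# Pattern copied from the removed helper for BuildFileType.PYTHON.
PATTERN = r"(?P<pre_version>\bversion\s?=\s?)\"(?P<version>\d*\.\d*\.\d*(-SNAPSHOT|-dev\d*)?)\""

content = 'project_version = "0.0.2-dev1"\nversion = "1.1.5-dev12"\n'
print(re.search(PATTERN, content).group("version"))      # -> 1.1.5-dev12
print(re.sub(PATTERN, r'\g<pre_version>"2.0.0"', content))
# project_version stays untouched: \b cannot match inside 'project_version',
# because '_' and 'v' are both word characters.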

@ -78,12 +78,6 @@ class DnsRecord(Validateable):
result.append("ipv4 & ipv6 may not both be empty.") result.append("ipv4 & ipv6 may not both be empty.")
return result return result
def ip(self) -> str:
if (self.ipv4):
return self.ipv4
else:
return self.ipv6
class Devops(Validateable): class Devops(Validateable):
def __init__( def __init__(

View file

@ -1,4 +1,3 @@
-from enum import Enum
 from typing import List, Dict
 from .common import (
     filter_none,
@ -6,23 +5,15 @@ from .common import (
 )

-class NamingType(Enum):
-    NAME_ONLY = 1
-    NAME_AND_MODULE = 2

 class Image(Validateable):
     def __init__(
         self,
         inp: dict,
     ):
-        self.module = inp.get("module")
-        self.name = inp.get("name")
         self.image_dockerhub_user = inp.get("image_dockerhub_user")
         self.image_dockerhub_password = inp.get("image_dockerhub_password")
         self.image_tag = inp.get("image_tag")
         self.image_build_commons_path = inp.get("image_build_commons_path")
-        self.image_naming = NamingType[inp.get("image_naming", "NAME_ONLY")]
         self.image_use_package_common_files = inp.get(
             "image_use_package_common_files", True
         )
@ -32,10 +23,8 @@ class Image(Validateable):

     def validate(self) -> List[str]:
         result = []
-        result += self.__validate_is_not_empty__("name")
         result += self.__validate_is_not_empty__("image_dockerhub_user")
         result += self.__validate_is_not_empty__("image_dockerhub_password")
-        result += self.__validate_is_not_empty__("image_naming")
         if not self.image_use_package_common_files:
             result += self.__validate_is_not_empty__("image_build_commons_path")
             result += self.__validate_is_not_empty__("image_build_commons_dir_name")
@ -48,16 +37,6 @@ class Image(Validateable):
         ]
         return "/".join(filter_none(commons_path)) + "/"

-    def image_name(self) -> str:
-        result: List[str] = [self.name]  # type: ignore
-        if (
-            self.image_naming == NamingType.NAME_AND_MODULE
-            and self.module
-            and self.module != ""
-        ):
-            result.append(self.module)
-        return "-".join(result)

     @classmethod
     def get_mapping_default(cls) -> List[Dict[str, str]]:
         return [
View file

@ -8,7 +8,7 @@ from .provider_digitalocean import Digitalocean
 from .provider_hetzner import Hetzner
 from .c4k import C4k
 from .image import Image
-from .release import ReleaseType, Release
+from .release import ReleaseType
 from ..infrastructure import BuildFileRepository, CredentialsApi, EnvironmentApi, GitApi
@ -69,7 +69,6 @@ class InitService:
                 Path(primary_build_file_id)
             )
             version = primary_build_file.get_version()
-            default_mappings += Release.get_mapping_default()

         credentials = Credentials(inp, default_mappings)
         authorization = self.authorization(credentials)
@ -112,8 +111,9 @@ class InitService:
         result = {}
         for name in credentials.mappings.keys():
             mapping = credentials.mappings[name]
-            if self.environment_api.is_defined(mapping.name_for_environment()):
-                result[name] = self.environment_api.get(mapping.name_for_environment())
+            env_value = self.environment_api.get(mapping.name_for_environment())
+            if env_value:
+                result[name] = env_value
             else:
                 if mapping.gopass_type() == GopassType.FIELD:
                     result[name] = self.credentials_api.gopass_field_from_path(

View file

@ -36,8 +36,6 @@ class Aws(Validateable, CredentialMappingDefault):
         result = {}
         if self.aws_as_backend:
             result = {
-                "access_key": self.aws_access_key,
-                "secret_key": self.aws_secret_key,
                 "bucket": self.aws_bucket,
                 "key": self.__bucket_key__(),
                 "region": self.aws_region,

View file

@ -20,14 +20,6 @@ CONFIG_CERTMANAGER = """certmanager:
""" """
CONFIG_ECHO = """echo: $echo CONFIG_ECHO = """echo: $echo
""" """
CONFIG_HETZNER_CSI = """hetzner:
hcloudApiToken:
source: "PLAIN" # PLAIN, GOPASS or PROMPT
parameter: $hcloud_api # the api key for the hetzner cloud
encryptionPassphrase:
source: "PLAIN" # PLAIN, GOPASS or PROMPT
parameter: $encryption # the encryption passphrase for created volumes
"""
class K3s(Validateable): class K3s(Validateable):
@ -36,11 +28,8 @@ class K3s(Validateable):
self.k3s_letsencrypt_email = inp.get("k3s_letsencrypt_email") self.k3s_letsencrypt_email = inp.get("k3s_letsencrypt_email")
self.k3s_letsencrypt_endpoint = inp.get("k3s_letsencrypt_endpoint", "staging") self.k3s_letsencrypt_endpoint = inp.get("k3s_letsencrypt_endpoint", "staging")
self.k3s_app_filename_to_provision = inp.get("k3s_app_filename_to_provision") self.k3s_app_filename_to_provision = inp.get("k3s_app_filename_to_provision")
self.k3s_enable_echo = inp.get("k3s_enable_echo", None) self.k3s_enable_echo = inp.get("k3s_enable_echo", "false")
self.k3s_provs_template = inp.get("k3s_provs_template", None) self.k3s_provs_template = inp.get("k3s_provs_template", None)
self.k3s_enable_hetzner_csi = inp.get("k3s_enable_hetzner_csi", False)
self.k3s_hetzner_api_token = inp.get("k3s_hetzner_api_token", None)
self.k3s_hetzner_encryption_passphrase = inp.get("k3s_hetzner_encryption_passphrase", None)
self.provision_dns: Optional[DnsRecord] = None self.provision_dns: Optional[DnsRecord] = None
def validate(self) -> List[str]: def validate(self) -> List[str]:
@ -48,9 +37,6 @@ class K3s(Validateable):
result += self.__validate_is_not_empty__("k3s_letsencrypt_email") result += self.__validate_is_not_empty__("k3s_letsencrypt_email")
result += self.__validate_is_not_empty__("k3s_letsencrypt_endpoint") result += self.__validate_is_not_empty__("k3s_letsencrypt_endpoint")
result += self.__validate_is_not_empty__("k3s_app_filename_to_provision") result += self.__validate_is_not_empty__("k3s_app_filename_to_provision")
if self.k3s_enable_hetzner_csi:
result += self.__validate_is_not_empty__("k3s_hetzner_api_token")
result += self.__validate_is_not_empty__("k3s_hetzner_encryption_passphrase")
if self.provision_dns: if self.provision_dns:
result += self.provision_dns.validate() result += self.provision_dns.validate()
return result return result
@ -75,9 +61,6 @@ class K3s(Validateable):
substitutes["letsencrypt_endpoint"] = self.k3s_letsencrypt_endpoint substitutes["letsencrypt_endpoint"] = self.k3s_letsencrypt_endpoint
if self.k3s_enable_echo is not None: if self.k3s_enable_echo is not None:
substitutes["echo"] = self.k3s_enable_echo substitutes["echo"] = self.k3s_enable_echo
if self.k3s_enable_hetzner_csi:
substitutes["hcloud_api"] = self.k3s_hetzner_api_token
substitutes["encryption"] = self.k3s_hetzner_encryption_passphrase
return self.__config_template__().substitute(substitutes) return self.__config_template__().substitute(substitutes)
def command(self, devops: Devops): def command(self, devops: Devops):
@ -86,7 +69,7 @@ class K3s(Validateable):
cmd = [ cmd = [
"provs-server.jar", "provs-server.jar",
"k3s", "k3s",
f"{self.k3s_provision_user}@{self.provision_dns.ip()}", f"{self.k3s_provision_user}@{self.provision_dns.fqdn}",
"-c", "-c",
f"{devops.build_path()}/out_k3sServerConfig.yaml", f"{devops.build_path()}/out_k3sServerConfig.yaml",
"-a", "-a",
@ -106,6 +89,4 @@ class K3s(Validateable):
template_text += CONFIG_IPV4 template_text += CONFIG_IPV4
if self.provision_dns.ipv6 is not None: if self.provision_dns.ipv6 is not None:
template_text += CONFIG_IPV6 template_text += CONFIG_IPV6
if self.k3s_enable_hetzner_csi:
template_text += CONFIG_HETZNER_CSI
return Template(template_text) return Template(template_text)

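The k3s config is assembled by concatenating template fragments and filling them in with string.Template, as shown above. A reduced sketch of that mechanism, using the CONFIG_ECHO fragment from this file plus a hypothetical $fqdn fragment standing in for the DNS-record parts:

from string import Template

CONFIG_ECHO = """echo: $echo
"""

# Fragments are appended conditionally before substitution.
template_text = "fqdn: $fqdn\n" + CONFIG_ECHO
config = Template(template_text).substitute(
    {"fqdn": "node.example.org", "echo": "true"}
)
print(config)
# fqdn: node.example.org
# echo: true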
View file

@ -1,4 +1,4 @@
-from typing import Optional, List, Dict
+from typing import Optional, List
 from pathlib import Path
 from .common import (
     Validateable,
@ -7,9 +7,6 @@ from .common import (
 from .version import (
     Version,
 )
-from .artifact import (
-    Artifact,
-)

 class Release(Validateable):
@ -24,14 +21,6 @@ class Release(Validateable):
             "release_secondary_build_files", []
         )
         self.version = version
-        self.release_tag_prefix = inp.get("release_tag_prefix", "")
-        self.release_artifact_server_url = inp.get("release_artifact_server_url")
-        self.release_organisation = inp.get("release_organisation")
-        self.release_repository_name = inp.get("release_repository_name")
-        self.release_artifact_token = inp.get("release_artifact_token")
-        self.release_artifacts = []
-        for a in inp.get("release_artifacts", []):
-            self.release_artifacts.append(Artifact(a))

     def update_release_type(self, release_type: ReleaseType):
         self.release_type = release_type
@ -64,44 +53,10 @@ class Release(Validateable):
             and self.release_type != ReleaseType.NONE
             and self.release_main_branch != self.release_current_branch
         ):
-            result.append(
-                f"Releases are allowed only on {self.release_main_branch}"
-            )
-        return result
-
-    def validate_for_artifact(self):
-        result = []
-        result += self.__validate_is_not_empty__("release_artifact_server_url")
-        result += self.__validate_is_not_empty__("release_organisation")
-        result += self.__validate_is_not_empty__("release_repository_name")
-        result += self.__validate_is_not_empty__("release_artifact_token")
+            result.append(f"Releases are allowed only on {self.release_main_branch}")
         return result

     def build_files(self) -> List[str]:
         result = [self.release_primary_build_file]
         result += self.release_secondary_build_files
         return result

-    def forgejo_release_api_endpoint(self) -> str:
-        validation = self.validate_for_artifact()
-        if validation != []:
-            raise RuntimeError(f"not valid for creating artifacts: {validation}")
-
-        server_url = self.release_artifact_server_url.removeprefix("/").removesuffix(
-            "/"
-        )
-        organisation = self.release_organisation.removeprefix("/").removesuffix("/")
-        repository = self.release_repository_name.removeprefix("/").removesuffix("/")
-        return f"{server_url}/api/v1/repos/{organisation}/{repository}/releases"
-
-    def forgejo_release_asset_api_endpoint(self, release_id: int) -> str:
-        return f"{self.forgejo_release_api_endpoint()}/{release_id}/assets"
-
-    @classmethod
-    def get_mapping_default(cls) -> List[Dict[str, str]]:
-        return [
-            {
-                "gopass_path": "server/meissa/repo/buero-rw",
-                "name": "release_artifact_token",
-            }
-        ]

View file

@ -32,6 +32,12 @@ class Version(Validateable):
         self.snapshot_suffix = snapshot_suffix
         self.default_snapshot_suffix = default_snapshot_suffix

+    def __eq__(self, other):
+        return other and self.to_string() == other.to_string()
+
+    def __hash__(self) -> int:
+        return self.to_string().__hash__()
+
     def is_snapshot(self):
         return self.snapshot_suffix is not None
@ -133,9 +139,3 @@ class Version(Validateable):
             snapshot_suffix=None,
             version_str=None,
         )

-    def __eq__(self, other):
-        return other and self.to_string() == other.to_string()
-
-    def __hash__(self) -> int:
-        return self.to_string().__hash__()

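Moving __eq__/__hash__ next to the constructor does not change behaviour: both delegate to to_string(), so two Version objects parsed from the same string compare equal and collapse in sets or dict keys. A sketch of that contract, assuming Version is imported from the ddadevops domain package and using Version.from_str as exercised elsewhere in this diff:

v1 = Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
v2 = Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
assert v1 == v2
assert len({v1, v2}) == 1  # hashing via to_string() deduplicates equal versions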
View file

@ -7,6 +7,5 @@ from .infrastructure import (
     CredentialsApi,
     GitApi,
     TerraformApi,
-    ArtifactDeploymentApi,
 )
 from .repository import DevopsRepository, BuildFileRepository

View file

@ -53,27 +53,37 @@ class ImageApi:
         self.execution_api = ExecutionApi()

     def image(self, name: str, path: Path):
-        self.execution_api.execute_live(
+        self.execution_api.run_handled(
             f"docker build -t {name} --file {path}/image/Dockerfile {path}/image"
         )

     def drun(self, name: str):
-        self.execution_api.execute_live(
-            f'docker run -it {name} /bin/bash'
+        self.execution_api.run_handled(
+            f'docker run -it --entrypoint="" {name} /bin/bash'
         )

     def dockerhub_login(self, username: str, password: str):
-        self.execution_api.execute_secure(
-            f"docker login --username {username} --password {password}",
-            "docker login --username ***** --password *****",
+        self.execution_api.run_handled(
+            f"docker login --username {username} --password {password}"
         )

-    def dockerhub_publish(self, name: str, username: str, tag: str):
-        self.execution_api.execute_live(f"docker tag {name} {username}/{name}:{tag}")
-        self.execution_api.execute_live(f"docker push {username}/{name}:{tag}")
+    def dockerhub_publish(self, name: str, username: str, tag=None):
+        if tag is not None:
+            self.execution_api.run_handled(
+                f"docker tag {name} {username}/{name}:{tag}"
+            )
+            self.execution_api.run_handled(
+                f"docker push {username}/{name}:{tag}"
+            )
+        self.execution_api.run_handled(
+            f"docker tag {name} {username}/{name}:latest"
+        )
+        self.execution_api.run_handled(
+            f"docker push {username}/{name}:latest"
+        )

     def test(self, name: str, path: Path):
-        self.execution_api.execute_live(
+        self.execution_api.run_handled(
             f"docker build -t {name}-test --file {path}/test/Dockerfile {path}/test"
         )
@ -84,58 +94,40 @@ class ExecutionApi:
         if dry_run:
             print(command)
         else:
-            try:
-                output = run(
-                    command,
-                    shell=shell,
-                    check=check,
-                    stdout=PIPE,
-                    stderr=PIPE,
-                    text=True,
-                ).stdout
-                output = output.rstrip()
-            except CalledProcessError as exc:
-                print(
-                    f"Command failed with code: {exc.returncode} and message: {exc.stderr}"
-                )
-                raise exc
+            # output = check_output(command, encoding="UTF-8", shell=shell)
+            output = run(
+                command, encoding="UTF-8", shell=shell, stdout=PIPE, check=check
+            ).stdout
+            output = output.rstrip()
         return output

-    def execute_secure(
-        self,
-        command: str,
-        sanitized_command: str,
-        dry_run=False,
-        shell=True,
-        check=True,
-    ):
-        try:
-            output = self.execute(command, dry_run, shell, check)
-            return output
-        except CalledProcessError as exc:
-            sanitized_exc = exc
-            sanitized_exc.cmd = sanitized_command
-            raise sanitized_exc
-
-    def execute_live(self, command: str, dry_run=False, shell=True):
+    def execute_live(self, command, dry_run=False, shell=True):
         if dry_run:
             print(command)
         else:
-            process = Popen(command, shell=shell)
-            outs, errs = process.communicate()
-            while outs is not None:
-                stdout.buffer.write(outs)
-            if process.returncode != 0:
-                raise RuntimeError(f"Execute live '{command}' failed with code {process.returncode}\nerrs: {errs}")
+            process = Popen(command, stdout=PIPE, shell=shell)
+            for line in iter(process.stdout.readline, b""):
+                print(line.decode("utf-8"), end="")
+            process.stdout.close()
+            process.wait()
+
+    def run_handled(self, command: str, shell=True, check=True):
+        try:
+            run(
+                command,
+                shell=shell,
+                check=check,
+                capture_output=True,
+                text=True)
+        except CalledProcessError as exc:
+            print("Command failed with code: ", exc.returncode, " and message:", exc.stderr)

 class EnvironmentApi:
     def get(self, key):
         return environ.get(key)

-    def is_defined(self, key):
-        return key in environ

 class CredentialsApi:
     def __init__(self):
@ -214,53 +206,3 @@ class GitApi:

 class TerraformApi:
     pass

-class ArtifactDeploymentApi:
-    def __init__(self):
-        self.execution_api = ExecutionApi()
-
-    def create_forgejo_release(self, api_endpoint_url: str, tag: str, token: str):
-        command = (
-            f'curl -X "POST" "{api_endpoint_url}" '
-            + ' -H "accept: application/json" -H "Content-Type: application/json"'
-            + f' -d \'{{ "body": "Provides files for release {tag}", "tag_name": "{tag}"}}\''
-        )  # noqa: E501
-        print(command + ' -H "Authorization: token xxxx"')
-        return self.execution_api.execute_secure(
-            command=command + f' -H "Authorization: token {token}"',
-            sanitized_command=command + ' -H "Authorization: token xxxx"',
-        )
-
-    def add_asset_to_release(
-        self,
-        api_endpoint_url: str,
-        attachment: Path,
-        attachment_type: str,
-        token: str,
-    ):
-        command = (
-            f'curl -X "POST" "{api_endpoint_url}"'
-            + ' -H "accept: application/json"'
-            + ' -H "Content-Type: multipart/form-data"'
-            + f' -F "attachment=@{attachment};type={attachment_type}"'
-        )  # noqa: E501
-        print(command + ' -H "Authorization: token xxxx"')
-        return self.execution_api.execute_secure(
-            command=command + f' -H "Authorization: token {token}"',
-            sanitized_command=command + ' -H "Authorization: token xxxx"',
-        )
-
-    def calculate_sha256(self, path: Path):
-        shasum = f"{path}.sha256"
-        self.execution_api.execute(
-            f"sha256sum {path} > {shasum}",
-        )
-        return shasum
-
-    def calculate_sha512(self, path: Path):
-        shasum = f"{path}.sha512"
-        self.execution_api.execute(
-            f"sha512sum {path} > {shasum}",
-        )
-        return shasum

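The added run_handled is the substance of the "Handle exceptions and print stderr" commit: check=True makes subprocess.run raise CalledProcessError on a non-zero exit, which is caught and reported with the captured stderr instead of aborting the build. A self-contained sketch of that pattern:

import subprocess
from subprocess import CalledProcessError

def run_handled(command: str) -> None:
    try:
        subprocess.run(
            command,
            shell=True,
            check=True,            # raise CalledProcessError on non-zero exit
            capture_output=True,   # keep stderr available for the error report
            text=True,
        )
    except CalledProcessError as exc:
        print("Command failed with code:", exc.returncode, "and message:", exc.stderr)

run_handled("ls /definitely-missing")  # prints the failure instead of raising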
View file

@ -26,8 +26,3 @@ class ReleaseMixin(DevopsBuild):
         devops = self.devops_repo.get_devops(self.project)
         release = devops.mixins[MixinType.RELEASE]
         self.release_service.tag_bump_and_push_release(release)

-    def publish_artifacts(self):
-        devops = self.devops_repo.get_devops(self.project)
-        release = devops.mixins[MixinType.RELEASE]
-        self.release_service.publish_artifacts(release)

View file

@ -1,11 +1,8 @@
-#
-#deprecated, we recommend to use install_functions_debian.sh instead. We will going to remove install_functions.sh in a future release.
-#
 function upgradeSystem() {
-  {
-    apt-get update
-    apt-get -qqy upgrade
-  } > /dev/null
+  export DEBIAN_FRONTEND=noninteractive
+  apt-get update > /dev/null
+  apt-get -y install apt-utils > /dev/null
+  apt-get -qqy dist-upgrade > /dev/null
 }

 function cleanupDocker() {

View file

@ -1,21 +0,0 @@
function upgradeSystem() {
  apk -U upgrade
}

function cleanupDocker() {
  rm -f /root/.ssh/authorized_keys
  rm -f /root/.ssh/authorized_keys2

  apk cache clean
  rm -rf /tmp/*
  find /var/cache -type f -exec rm -rf {} \;
  find /var/log/ -name '*.log' -exec rm -f {} \;
}

function cleanupAmi() {
  rm -f /home/ubuntu/.ssh/authorized_keys
  rm -f /home/ubuntu/.ssh/authorized_keys2
  cleanupDocker
}

View file

@ -1,25 +0,0 @@
function upgradeSystem() {
  apt-get update
  apt-get -qqy upgrade
}

function cleanupDocker() {
  rm -f /root/.ssh/authorized_keys
  rm -f /root/.ssh/authorized_keys2

  apt-get clean
  apt-get -qqy autoremove --purge
  apt-get -qqy autoclean
  rm -rf /var/lib/apt/lists/
  rm -rf /tmp/*
  find /var/cache -type f -exec rm -rf {} \;
  find /var/log/ -name '*.log' -exec rm -f {} \;
}

function cleanupAmi() {
  rm -f /home/ubuntu/.ssh/authorized_keys
  rm -f /home/ubuntu/.ssh/authorized_keys2
  cleanupDocker
}

View file

@ -7,16 +7,12 @@ from src.main.python.ddadevops.domain import (
 from src.test.python.domain.helper import (
     BuildFileRepositoryMock,
     GitApiMock,
-    ArtifactDeploymentApiMock,
     build_devops,
 )
 from src.main.python.ddadevops.application import ReleaseService

-def test_should_update_release_type():
-    sut = ReleaseService(
-        GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepositoryMock("build.py")
-    )
+def test_sould_update_release_type():
+    sut = ReleaseService(GitApiMock(), BuildFileRepositoryMock("build.py"))
     devops = build_devops({})
     release = devops.mixins[MixinType.RELEASE]
     sut.update_release_type(release, "MAJOR")
@ -24,40 +20,3 @@ def test_should_update_release_type():
     with pytest.raises(Exception):
         sut.update_release_type(release, "NOT_EXISTING")

-def test_should_publish_artifacts():
-    mock = ArtifactDeploymentApiMock(release='{"id": 2345}')
-    sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
-    devops = build_devops(
-        {
-            "release_artifacts": ["target/art"],
-            "release_artifact_server_url": "http://repo.test/",
-            "release_organisation": "orga",
-            "release_repository_name": "repo",
-        }
-    )
-    release = devops.mixins[MixinType.RELEASE]
-
-    sut.publish_artifacts(release)
-    assert "http://repo.test/api/v1/repos/orga/repo/releases/2345/assets" == mock.add_asset_to_release_api_endpoint
-
-def test_should_throw_exception_if_there_was_an_error_in_publish_artifacts():
-    devops = build_devops(
-        {
-            "release_artifacts": ["target/art"],
-            "release_artifact_server_url": "http://repo.test/",
-            "release_organisation": "orga",
-            "release_repository_name": "repo",
-        }
-    )
-    release = devops.mixins[MixinType.RELEASE]
-
-    with pytest.raises(Exception):
-        mock = ArtifactDeploymentApiMock(release='')
-        sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
-        sut.publish_artifacts(release)
-
-    with pytest.raises(Exception):
-        mock = ArtifactDeploymentApiMock(release='{"message": "there was an error", "url":"some-url"}')
-        sut = ReleaseService(GitApiMock(), mock, BuildFileRepositoryMock())
-        sut.publish_artifacts(release)

View file

@ -53,11 +53,6 @@ def devops_config(overrides: dict) -> dict:
"release_current_branch": "my_feature", "release_current_branch": "my_feature",
"release_primary_build_file": "./package.json", "release_primary_build_file": "./package.json",
"release_secondary_build_file": [], "release_secondary_build_file": [],
"release_artifacts": [],
"release_artifact_token": "release_artifact_token",
"release_artifact_server_url": None,
"release_organisation": None,
"release_repository_name": None,
"credentials_mappings": [ "credentials_mappings": [
{ {
"gopass_path": "a/path", "gopass_path": "a/path",
@ -104,9 +99,6 @@ class EnvironmentApiMock:
def get(self, key): def get(self, key):
return self.mappings.get(key, None) return self.mappings.get(key, None)
def is_defined(self, key):
return key in self.mappings
class CredentialsApiMock: class CredentialsApiMock:
def __init__(self, mappings): def __init__(self, mappings):
@ -156,33 +148,5 @@ class GitApiMock:
def push(self): def push(self):
pass pass
def push_follow_tags(self):
pass
def checkout(self, branch: str): def checkout(self, branch: str):
pass pass
class ArtifactDeploymentApiMock:
def __init__(self, release=""):
self.release = release
self.create_forgejo_release_count = 0
self.add_asset_to_release_count = 0
self.add_asset_to_release_api_endpoint = ""
def create_forgejo_release(self, api_endpoint: str, tag: str, token: str):
self.create_forgejo_release_count += 1
return self.release
def add_asset_to_release(
self, api_endpoint: str, attachment: str, attachment_type: str, token: str
):
self.add_asset_to_release_api_endpoint = api_endpoint
self.add_asset_to_release_count += 1
pass
def calculate_sha256(self, path: Path):
return f"{path}.sha256"
def calculate_sha512(self, path: Path):
return f"{path}.sha512"

View file

@@ -1,32 +0,0 @@
-import pytest
-from pybuilder.core import Project
-from pathlib import Path
-from src.main.python.ddadevops.domain import (
-    Validateable,
-    DnsRecord,
-    Devops,
-    BuildType,
-    MixinType,
-    Artifact,
-)
-from .helper import build_devops, devops_config
-
-
-def test_should_validate_release():
-    sut = Artifact("x")
-    assert sut.is_valid()
-
-    sut = Artifact(None)
-    assert not sut.is_valid()
-
-
-def test_should_calculate_type():
-    sut = Artifact("x.jar")
-    assert "application/x-java-archive" == sut.type()
-
-    sut = Artifact("x.js")
-    assert "application/x-javascript" == sut.type()
-
-    sut = Artifact("x.jar.sha256")
-    assert "text/plain" == sut.type()


@@ -7,7 +7,7 @@ from src.main.python.ddadevops.domain import (
 )
 
-def test_should_validate_build_file():
+def test_sould_validate_build_file():
     sut = BuildFile(Path("./project.clj"), "content")
     assert sut.is_valid()
@@ -18,7 +18,7 @@ def test_should_validate_build_file():
     assert not sut.is_valid()
 
-def test_should_calculate_build_type():
+def test_sould_calculate_build_type():
     sut = BuildFile(Path("./project.clj"), "content")
     assert sut.build_file_type() == BuildFileType.JAVA_CLOJURE
@@ -29,7 +29,7 @@ def test_should_calculate_build_type():
     assert sut.build_file_type() == BuildFileType.JS
 
-def test_should_parse_and_set_js():
+def test_sould_parse_and_set_js():
     sut = BuildFile(
         Path("./package.json"),
         """
@@ -77,7 +77,7 @@ def test_should_parse_and_set_js():
     )
 
-def test_should_parse_and_set_version_for_gradle():
+def test_sould_parse_and_set_version_for_gradle():
     sut = BuildFile(
         Path("./build.gradle"),
         """
@@ -97,7 +97,7 @@ version = "1.1.5-SNAPSHOT"
     assert '\nversion = "2.0.0"\n' == sut.content
 
-def test_should_parse_and_set_version_for_py():
+def test_sould_parse_and_set_version_for_py():
     sut = BuildFile(
         Path("./build.py"),
         """
@@ -143,7 +143,7 @@ version = "1.1.5-SNAPSHOT"
     assert '\nversion = "2.0.0"\n' == sut.content
 
-def test_should_parse_and_set_version_for_clj():
+def test_sould_parse_and_set_version_for_clj():
     sut = BuildFile(
         Path("./project.clj"),
         """
@@ -182,71 +182,3 @@ def test_should_parse_and_set_version_for_clj():
         '\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n:dependencies [[org.clojure/clojure "1.11.0"]]\n)\n '
         == sut.content
     )
-
-
-def test_should_parse_and_set_version_for_clj_edn():
-    sut = BuildFile(
-        Path("./deps.edn"),
-        """
-{:project {:name org.domaindrivenarchitecture/dda-backup
-           :version "1.1.5-SNAPSHOT"}
-}
-""",
-    )
-    assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
-
-    sut = BuildFile(
-        Path("./deps.edn"),
-        """
-{:project {:name org.domaindrivenarchitecture/dda-backup
-           :version "1.1.5-SNAPSHOT"}
-}
-""",
-    )
-    sut.set_version(Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT").create_major())
-    assert (
-        '\n{:project {:name org.domaindrivenarchitecture/dda-backup\n :version "2.0.0"}\n\n}\n'
-        == sut.content
-    )
-
-
-def test_should_throw_for_clj_wrong_version():
-    sut = BuildFile(
-        Path("./project.clj"),
-        """
-(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-Snapshot"
-  :description "jira c4k-installation package"
-  :url "https://domaindrivenarchitecture.org"
-)
-""",
-    )
-    with pytest.raises(RuntimeError):
-        sut.get_version()
-
-
-def test_should_ignore_first_version_for_py():
-    sut = BuildFile(
-        Path("./build.py"),
-        """
-from pybuilder.core import init, use_plugin, Author
-use_plugin("python.core")
-
-name = "ddadevops"
-project_version = "0.0.2-dev1"
-version = "1.1.5-dev12"
-summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
-""",
-    )
-    assert sut.get_version() == Version.from_str("1.1.5-dev12", "dev")
-
-
-def test_should_ignore_first_version_for_gradle():
-    sut = BuildFile(
-        Path("./build.gradle"),
-        """
-kotlin_version = "3.3.3"
-version = "1.1.5-SNAPSHOT"
-""",
-    )
-    assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT", "SNAPSHOT")
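
The two "ignore first version" tests fix a subtle parsing rule: assignments such as project_version (build.py) or kotlin_version (build.gradle) must not shadow the real version line. A line-anchored regex is one way to get exactly that behaviour (illustrative helper, not the repo's actual parser):

    import re

    def extract_version(content: str) -> str:
        # only a line that *starts* with `version` counts; `project_version`
        # and `kotlin_version` fail the line anchor and are skipped
        match = re.search(r'^\s*version\s*=\s*"(?P<v>[^"]+)"', content, flags=re.MULTILINE)
        if match is None:
            raise RuntimeError("no version found")
        return match.group("v")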


@@ -4,11 +4,10 @@ from src.main.python.ddadevops.domain import (
     Version,
     BuildType,
     MixinType,
-    Artifact,
 )
 
-def test_devops_creation():
+def test_devops_factory():
     with pytest.raises(Exception):
         DevopsFactory().build_devops({"build_types": ["NOTEXISTING"]})
@@ -51,7 +50,6 @@ def test_devops_creation():
     assert sut is not None
     assert sut.specialized_builds[BuildType.C4K] is not None
 
-def test_release_devops_creation():
     sut = DevopsFactory().build_devops(
         {
             "stage": "test",
@@ -68,32 +66,3 @@ def test_release_devops_creation():
     )
     assert sut is not None
     assert sut.mixins[MixinType.RELEASE] is not None
-
-    sut = DevopsFactory().build_devops(
-        {
-            "stage": "test",
-            "name": "mybuild",
-            "module": "test_image",
-            "project_root_path": "../../..",
-            "build_types": [],
-            "mixin_types": ["RELEASE"],
-            "release_main_branch": "main",
-            "release_current_branch": "my_feature",
-            "release_config_file": "project.clj",
-            "release_artifacts": ["x.jar"],
-            "release_artifact_token": "y",
-            "release_artifact_server_url": "https://repo.prod.meissa.de",
-            "release_organisation": "meissa",
-            "release_repository_name": "provs",
-        },
-        Version.from_str("1.0.0", "SNAPSHOT"),
-    )
-    release = sut.mixins[MixinType.RELEASE]
-    assert release is not None
-    assert Artifact("x.jar") == release.release_artifacts[0]
-
-
-def test_on_merge_input_should_win():
-    sut = DevopsFactory()
-    assert {'tag': 'inp'} == sut.merge(inp = {'tag': 'inp'}, context = {'tag': 'context'}, authorization={})
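
The removed test_on_merge_input_should_win fixes the precedence of DevopsFactory.merge: explicit input beats context. A sketch of the implied semantics (implementation assumed; only the input-over-context precedence is taken from the assertion above, placing authorization lowest is an extrapolation):

    def merge(inp: dict, context: dict, authorization: dict) -> dict:
        result: dict = {}
        result.update(authorization)  # assumed lowest precedence
        result.update(context)
        result.update(inp)            # input wins on key collisions
        return result

    assert {"tag": "inp"} == merge({"tag": "inp"}, {"tag": "context"}, {})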


@@ -1,4 +1,6 @@
 from pybuilder.core import Project
+import logging
+from subprocess import Popen, PIPE, run
 from pathlib import Path
 from src.main.python.ddadevops.domain import (
     BuildType,
@@ -12,16 +14,3 @@ def test_devops_build_commons_path():
     assert image is not None
     assert image.is_valid()
     assert "docker/" == image.build_commons_path()
-
-
-def test_should_calculate_image_name():
-    sut = build_devops({})
-    image = sut.specialized_builds[BuildType.IMAGE]
-    assert "name" == image.image_name()
-
-    sut = build_devops({'image_naming': "NAME_ONLY"})
-    image = sut.specialized_builds[BuildType.IMAGE]
-    assert "name" == image.image_name()
-
-    sut = build_devops({'image_naming': "NAME_AND_MODULE"})
-    image = sut.specialized_builds[BuildType.IMAGE]
-    assert "name-module" == image.image_name()


@@ -41,8 +41,6 @@ def test_should_calculate_backend_config():
             {
                 "module": "dns_aws",
                 "stage": "prod",
-                "aws_access_key": "aws_access_key",
-                "aws_secret_key": "aws_secret_key",
                 "aws_bucket": "meissa-configuration",
                 "aws_bucket_kms_key_id": "arn:aws:kms:eu-central-1:907507348333:alias/meissa-configuration",
                 "aws_region": "eu-central-1",
@@ -50,8 +48,6 @@
         )
     )
     assert {
-        "access_key": "aws_access_key",
-        "secret_key": "aws_secret_key",
         "bucket": "meissa-configuration",
         "key": "prod/dns_aws",
         "kms_key_id": "arn:aws:kms:eu-central-1:907507348333:alias/meissa-configuration",


@@ -24,7 +24,7 @@ def test_should_calculate_command():
     assert (
         "provs-server.jar "
         + "k3s "
-        + "k3s_provision_user@::1 "
+        + "k3s_provision_user@example.org "
         + "-c "
         + "root_path/target/name/module/out_k3sServerConfig.yaml "
         + "-a "


@@ -1,4 +1,3 @@
-import pytest
 from pybuilder.core import Project
 from pathlib import Path
 from src.main.python.ddadevops.domain import (
@@ -15,7 +14,7 @@ from src.main.python.ddadevops.domain import (
 from .helper import build_devops, devops_config
 
-def test_should_validate_release():
+def test_sould_validate_release():
     sut = Release(
         devops_config(
             {
@@ -49,7 +48,7 @@ def test_should_validate_release():
     assert not sut.is_valid()
 
-def test_should_calculate_build_files():
+def test_sould_calculate_build_files():
     sut = Release(
         devops_config(
             {
@@ -62,74 +61,3 @@ def test_should_calculate_build_files():
         Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
     )
     assert ["project.clj", "package.json"] == sut.build_files()
-
-
-def test_should_calculate_forgejo_release_api_endpoint():
-    sut = Release(
-        devops_config(
-            {
-                "release_artifacts": [],
-                "release_artifact_token": "y",
-                "release_artifact_server_url": "https://repo.prod.meissa.de",
-                "release_organisation": "meissa",
-                "release_repository_name": "provs",
-            }
-        ),
-        Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
-    )
-    assert (
-        "https://repo.prod.meissa.de/api/v1/repos/meissa/provs/releases"
-        == sut.forgejo_release_api_endpoint()
-    )
-
-    sut = Release(
-        devops_config(
-            {
-                "release_artifacts": ["x"],
-                "release_artifact_token": "y",
-                "release_artifact_server_url": "https://repo.prod.meissa.de/",
-                "release_organisation": "/meissa/",
-                "release_repository_name": "provs",
-            }
-        ),
-        Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
-    )
-    assert (
-        "https://repo.prod.meissa.de/api/v1/repos/meissa/provs/releases"
-        == sut.forgejo_release_api_endpoint()
-    )
-    assert(
-        "/meissa/"
-        == sut.release_organisation
-    )
-
-    with pytest.raises(Exception):
-        sut = Release(
-            devops_config(
-                {
-                    "release_artifact_server_url": "https://repo.prod.meissa.de",
-                    "release_organisation": None,
-                    "release_repository_name": "provs",
-                }
-            ),
-            Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
-        )
-        sut.forgejo_release_api_endpoint()
-
-
-def test_should_calculate_forgejo_release_asset_api_endpoint():
-    sut = Release(
-        devops_config(
-            {
-                "release_artifacts": ["x"],
-                "release_artifact_token": "y",
-                "release_artifact_server_url": "https://repo.prod.meissa.de",
-                "release_organisation": "meissa",
-                "release_repository_name": "provs",
-            }
-        ),
-        Version.from_str("1.3.1-SNAPSHOT", "SNAPSHOT"),
-    )
-    assert (
-        "https://repo.prod.meissa.de/api/v1/repos/meissa/provs/releases/123/assets"
-        == sut.forgejo_release_asset_api_endpoint(123)
-    )
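
The deleted endpoint tests describe the URL construction: trailing slashes on the server URL and surrounding slashes on the organisation are normalised when the endpoint is built, while the configured organisation value itself stays verbatim ("/meissa/"); missing organisation or repository data raises. A sketch of that construction (standalone functions assumed here; the real code is a method on Release):

    def forgejo_release_api_endpoint(server_url, organisation, repository_name) -> str:
        if not (server_url and organisation and repository_name):
            raise RuntimeError("server url, organisation and repository name are required")
        server = server_url.rstrip("/")
        orga = organisation.strip("/")  # normalised only here, stored untouched
        return f"{server}/api/v1/repos/{orga}/{repository_name}/releases"

    def forgejo_release_asset_api_endpoint(server_url, organisation, repository_name, release_id) -> str:
        base = forgejo_release_api_endpoint(server_url, organisation, repository_name)
        return f"{base}/{release_id}/assets"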


@@ -5,9 +5,7 @@ from pybuilder.core import Project
 from src.main.python.ddadevops.release_mixin import ReleaseMixin
 from src.main.python.ddadevops.domain import Devops, Release
-from src.main.python.ddadevops.application import ReleaseService
-from src.main.python.ddadevops.infrastructure import BuildFileRepository
-from .domain.helper import devops_config, GitApiMock, ArtifactDeploymentApiMock
+from .domain.helper import devops_config
 from .resource_helper import copy_resource
@@ -16,8 +14,6 @@ def test_release_mixin(tmp_path):
     copy_resource(Path("package.json"), tmp_path)
     project = Project(str_tmp_path, name="name")
 
-    os.environ["RELEASE_ARTIFACT_TOKEN"] = "ratoken"
-
     sut = ReleaseMixin(
         project,
         devops_config(
@@ -32,37 +28,3 @@ def test_release_mixin(tmp_path):
     sut.initialize_build_dir()
     assert sut.build_path() == f"{str_tmp_path}/target/name/release-test"
-
-
-def test_release_mixin_different_version_suffixes(tmp_path):
-    str_tmp_path = str(tmp_path)
-    copy_resource(Path("config.py"), tmp_path)
-    copy_resource(Path("config.gradle"), tmp_path)
-    project = Project(str_tmp_path, name="name")
-    os.environ["RELEASE_ARTIFACT_TOKEN"] = "ratoken"
-
-    sut = ReleaseMixin(
-        project,
-        devops_config(
-            {
-                "project_root_path": str_tmp_path,
-                "mixin_types": ["RELEASE"],
-                "build_types": [],
-                "module": "release-test",
-                "release_current_branch": "main",
-                "release_main_branch": "main",
-                "release_primary_build_file": "config.py",
-                "release_secondary_build_files": ["config.gradle"],
-            }
-        ),
-    )
-    sut.release_service = ReleaseService(GitApiMock(), ArtifactDeploymentApiMock(), BuildFileRepository(project.basedir))
-
-    sut.initialize_build_dir()
-    sut.update_release_type("PATCH")
-    sut.prepare_release()
-    sut.tag_bump_and_push_release()
-
-    assert sut.release_service.build_file_repository.get(Path("config.py")).get_version().to_string() == "3.1.5-dev"
-    assert sut.release_service.build_file_repository.get(Path("config.gradle")).get_version().to_string() == "3.1.5-SNAPSHOT"
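
The removed end-to-end test also pins down the bump behaviour: after a PATCH release, each build file keeps its own snapshot suffix ("-dev" in config.py, "-SNAPSHOT" in config.gradle) while the patch number increases. Roughly, under the assumption that the fixtures start at 3.1.4 (illustrative function, not the repo's Version implementation):

    def bump_patch(version: str) -> str:
        base, _, suffix = version.partition("-")
        major, minor, patch = (int(part) for part in base.split("."))
        bumped = f"{major}.{minor}.{patch + 1}"
        # the suffix style of the source file is preserved, not normalised
        return f"{bumped}-{suffix}" if suffix else bumped

    assert bump_patch("3.1.4-dev") == "3.1.5-dev"
    assert bump_patch("3.1.4-SNAPSHOT") == "3.1.5-SNAPSHOT"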