Merge branch 'ddd-intro'

This commit is contained in:
Michael Jerger 2023-06-02 17:11:40 +02:00
commit 90a1b342b7
91 changed files with 4286 additions and 1295 deletions

View file

@ -1,10 +1,13 @@
image: "python:3.8"
image: "python:3.10"
before_script:
- python --version
- python -m pip install --upgrade pip
- pip install -r requirements.txt
- export IMAGE_TAG=$CI_IMAGE_TAG
- export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
- export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
stages:
- lint&test
- upload
@ -14,20 +17,20 @@ flake8:
stage: lint&test
script:
- pip install -r dev_requirements.txt
- flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 --show-source --statistics src/main/python/ddadevops/*.py
- flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics src/main/python/ddadevops/*.py
- flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 --show-source --statistics src/main/python/ddadevops/
- flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 --per-file-ignores="__init__.py:F401" --ignore=E722,W503 --statistics src/main/python/ddadevops/
mypy:
stage: lint&test
script:
- pip install -r dev_requirements.txt
- python -m mypy src/main/python/ddadevops/*.py --ignore-missing-imports
- python -m mypy src/main/python/ddadevops/ --ignore-missing-imports --disable-error-code=attr-defined --disable-error-code=union-attr
pylint:
stage: lint&test
script:
- pip install -r dev_requirements.txt
- pylint -d C0301,W0614,R0201,C0114,C0115,C0116,similarities,W0702,W0702,R0913,R0902,R0914,R1732 src/main/python/ddadevops/*.py
- pylint -d W0511,R0903,C0301,W0614,C0114,C0115,C0116,similarities,W1203,W0702,W0702,R0913,R0902,R0914,R1732,R1705,W0707,C0123,W0703,C0103 src/main/python/ddadevops/
pytest:
stage: lint&test

298
README.md
View file

@ -4,39 +4,102 @@
![release prod](https://github.com/DomainDrivenArchitecture/dda-devops-build/workflows/release%20prod/badge.svg)
dda-devops-build provides an environment to tie several DevOps tools together for easy interoperation. Supported tools are:
* aws with
* simple api-key auth
* mfa & assume-role auth
* hetzner with simple api-key auth
* terraform v0.11, v0.12 supporting
* local file backends
* s3 backends
* docker / dockerhub
* user / team credentials managed by gopass
* dda-pallet
dda-devops-build integrates all the tools we use to work with clouds & provide some nice functions around.
# Setup
Tools we support are
Ensure that your python3 version is at least Python 3.7!
* terraform: for setting up the plain infrastructure around.
* docker: for creating images
* c4k: for generating kubernetes manifests
* provs: for setting up small single-node k3s clusters
* gopass: for credential management on devops computers
* cloud providers: hetzner, digitalocean, aws
In addition we provide a ReleaseMixin for release related tasks like tag / publish & version-bump
```mermaid
classDiagram
class DevopsBuild {
name()
build_path()
initialize_build_dir()
}
class DevopsTerraformBuild {
terraform_build_commons_path()
project_vars()
initialize_build_dir()
post_build()
read_output_json()
plan()
plan_fail_on_diff()
apply(auto_approve=False)
refresh()
destroy(auto_approve=False)
tf_import(tf_import_name, tf_import_resource,)
print_terraform_command(terraform)
}
class DevopsImageBuild {
def initialize_build_dir()
image()
drun()
dockerhub_login()
dockerhub_publish()
test()
}
class ReleaseMixin {
prepare_release()
tag_and_push_release()
}
class ProvsK3sBuild {
def update_runtime_config(dns_record)
write_provs_config()
provs_apply(dry_run=False)
}
class C4kBuild {
def update_runtime_config(dns_record)
def write_c4k_config()
def write_c4k_auth()
c4k_apply(dry_run=False)
}
DevopsBuild <|-- DevopsImageBuild
DevopsBuild <|-- DevopsTerraformBuild
DevopsBuild <|-- ReleaseMixin
DevopsBuild <|-- ProvsK3sBuild
DevopsBuild <|-- C4kBuild
link DevopsBuild "./doc/DevopsBuild.md"
```
Principles we follow are:
* Separate build artefacts from version-controlled code
* Domain Driven Design - in order to stay sustainable
## Installation
Ensure that your python3 version is at least Python 3.10
```
sudo apt install python3-pip
pip3 install pip3 --upgrade
pip3 install pybuilder ddadevops deprecation
pip3 install -r requirements.txt
export PATH=$PATH:~/.local/bin
# in case of using terraform
pip3 install dda-python-terraform packaging
# in case of using AwsMixin
pip3 install boto3
# in case of using AwsMfaMixin
pip3 install boto3 mfa
```
# Example Build
## Reference
* [DevopsBuild](./doc/DevopsBuild.md)
## Example Build
lets assume the following project structure
@ -50,7 +113,7 @@ my-project
| | -> ...
```
```
```python
from pybuilder.core import task, init
from ddadevops import *
@ -58,22 +121,36 @@ name = 'my-project'
MODULE = 'my-module'
PROJECT_ROOT_PATH = '..'
class MyBuild(DevopsTerraformBuild):
pass
@init
def initialize(project):
project.build_depends_on('ddadevops>=0.5.0')
account_name = 'my-aws-account-name'
account_id = 'my-aws-account-id'
stage = 'my stage i.e. dev|test|prod'
additional_vars = {'var_to_use_insied_terraform': '...'}
additional_var_files = ['variable-' + account_name + '-' + stage + '.tfvars']
config = create_devops_terraform_build_config(stage, PROJECT_ROOT_PATH,
MODULE, additional_vars,
additional_tfvar_files=additional_var_files)
build = MyBuild(project, config)
project.build_depends_on("ddadevops>=4.0.0-dev")
config = {
"credentials_mapping": [
{
"gopass_path": environ.get("DIGITALOCEAN_TOKEN_KEY_PATH", None),
"name": "do_api_key",
},
{
"gopass_path": environ.get("HETZNER_API_KEY_PATH", None),
"name": "hetzner_api_key",
},
],
"name": name,
"module": MODULE,
"stage": environ["STAGE"],
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["TERRAFORM"],
"mixin_types": [],
"tf_provider_types": ["DIGITALOCEAN", "HETZNER"],
"tf_use_workspace": False,
"tf_terraform_semantic_version": "1.4.2",
"do_as_backend": True,
"do_bucket": "your-bucket",
}
build = DevopsTerraformBuild(project, config)
build.initialize_build_dir()
@ -86,148 +163,17 @@ def plan(project):
@task
def apply(project):
build = get_devops_build(project)
build.apply()
build.apply(True)
@task
def destroy(project):
build = get_devops_build(project)
build.destroy()
build.destroy(True)
@task
def tf_import(project):
build = get_devops_build(project)
build.tf_import('aws_resource.choosen_name', 'the_aws_id')
```
## Feature aws-backend
Will use a file `backend.dev.live.properties` where dev is the [account-name], live is the [stage].
the backend.dev.live.properties file content:
```
key = ".."
region = "the aws region"
profile = "the profile used for aws"
bucket = "the s3 bucket name"
kms_key_id = "the aws key id"
```
the build.py file content:
```
class MyBuild(AwsBackendPropertiesMixin, DevopsTerraformBuild):
pass
@init
def initialize(project):
project.build_depends_on('ddadevops>=1.0')
account_name = 'my-aws-account-name'
account_id = 'my-aws-account-id'
stage = 'my stage i.e. dev|test|prod'
additional_vars = {}
config = create_devops_terraform_build_config(stage, PROJECT_ROOT_PATH,
MODULE, additional_vars)
config = add_aws_backend_properties_mixin_config(config, account_name)
build = MyBuild(project, config)
build.initialize_build_dir()
```
## Feature aws-mfa-assume-role
In order to use aws assume role in combination with the mfa-tool (`pip install mfa`):
the build.py file content:
```
class MyBuild(AwsMfaMixin, DevopsTerraformBuild):
pass
@init
def initialize(project):
project.build_depends_on('ddadevops>=1.0')
account_name = 'my-aws-account-name'
account_id = 'my-aws-account-id'
stage = 'my stage i.e. dev|test|prod'
additional_vars = {}
config = create_devops_terraform_build_config(stage, PROJECT_ROOT_PATH,
MODULE, additional_vars)
config = add_aws_backend_properties_mixin_config(config, account_name)
config = add_aws_mfa_mixin_config(config, account_id, 'eu-central-1',
mfa_role='my_developer_role',
mfa_account_prefix='company-',
mfa_login_account_suffix='users_are_defined_here')
build = MyBuild(project, config)
build.initialize_build_dir()
@task
def access(project):
build = get_devops_build(project)
build.get_mfa_session()
```
## Feature DdaDockerBuild
The docker build supports image building, tagging, testing and login to dockerhost.
For bash based builds we support often used script-parts as predefined functions [see install_functions.sh](src/main/resources/docker/image/resources/install_functions.sh).
A full working example: [doc/example/50_docker_module](doc/example/50_docker_module)
## Feature AwsRdsPgMixin
The AwsRdsPgMixin provides
* execute_pg_rds_sql - function will optionally resolve dns-c-names for trusted ssl-handshakes
* alter_db_user_password
* add_new_user
* deactivate_user
the build.py file content:
```
class MyBuild(..., AwsRdsPgMixin):
pass
@init
def initialize(project):
project.build_depends_on('ddadevops>=1.0')
...
config = add_aws_rds_pg_mixin_config(config,
stage + "-db.bcsimport.kauf." + account_name + ".breuni.de",
"kauf_bcsimport",
rds_resolve_dns=True,)
build = MyBuild(project, config)
build.initialize_build_dir()
@task
def rotate_credentials_in(project):
build = get_devops_build(project)
build.alter_db_user_password('/postgres/support')
build.alter_db_user_password('/postgres/superuser')
build.add_new_user('/postgres/superuser', '/postgres/app', 'pg_group_role')
@task
def rotate_credentials_out(project):
build = get_devops_build(project)
build.deactivate_user('/postgres/superuser', 'old_user_name')
```
## Development & mirrors
Development happens at: https://repo.prod.meissa.de/meissa/dda-devops-build
Mirrors are:
* https://gitlab.com/domaindrivenarchitecture/dda-devops-build (issues and PR, CI)
For more details about our repository model see: https://repo.prod.meissa.de/meissa/federate-your-repos
### Publish snapshot
1. every push will be published as dev-dependency
### Release
## Release
```
adjust version no in build.py to release version no.
@ -240,7 +186,7 @@ git push
pip3 install --upgrade --user ddadevops
```
# License
## License
Copyright © 2021 meissa GmbH
Copyright © 2023 meissa GmbH
Licensed under the [Apache License, Version 2.0](LICENSE) (the "License")

View file

@ -28,12 +28,12 @@ use_plugin("python.distutils")
default_task = "publish"
name = "ddadevops"
version = "3.1.3"
version = "4.0.0-dev68"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
description = __doc__
authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]
url = "https://github.com/DomainDrivenArchitecture/dda-devops-build"
requires_python = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3,!=3.4" # CHECK IF NEW VERSION EXISTS
url = "https://repo.prod.meissa.de/meissa/dda-devops-build"
requires_python = ">=3.10" # CHECK IF NEW VERSION EXISTS
license = "Apache Software License"
@init
@ -43,7 +43,7 @@ def initialize(project):
project.set_property("verbose", True)
project.get_property("filter_resources_glob").append("main/python/ddadevops/__init__.py")
#project.set_property("dir_source_unittest_python", "src/unittest/python")
project.set_property("dir_source_unittest_python", "src/test/python")
project.set_property("copy_resources_target", "$dir_dist/ddadevops")
project.get_property("copy_resources_glob").append("LICENSE")
@ -60,12 +60,9 @@ def initialize(project):
project.set_property("distutils_classifiers", [
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.10',
'Operating System :: POSIX :: Linux',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',

61
doc/DevopsBuild.md Normal file
View file

@ -0,0 +1,61 @@
# DevopsBuild
DevopsBuild provides the basic build functionality.
```mermaid
classDiagram
class DevopsBuild {
name() - the name of build
build_path() - the build dir in target
initialize_build_dir() - copy current directory & additional files to target
}
```
## Input
| name | description | default |
| ----------------- | -------------------------------------------------------------------------------------------------- | ------- |
| name | dedicated name of the build | module |
| module | module name - may result in a hierarchy like name/module | |
| stage | sth. like test, int, acc or prod | |
| project_root_path | relative path to projects root. Is used to locate the target dir | |
| build_dir_name | name of dir, build is executed in | target |
| build_types | list of special builds used. Valid values are ["IMAGE", "C4K", "K3S", "TERRAFORM"] | [] |
| mixin_types       | mixins are orthogonal to builds and represent additional capabilities. Valid Values are ["RELEASE"] | []      |
## Example Usage
```python
from subprocess import run
from pybuilder.core import task, init
from ddadevops import *
name = 'my-project'
MODULE = 'my-module'
PROJECT_ROOT_PATH = '..'
@init
def initialize(project):
project.build_depends_on("ddadevops>=4.0.0")
config = {
"name": name,
"module": MODULE,
"stage": environ["STAGE"],
"project_root_path": PROJECT_ROOT_PATH,
"build_types": [],
"mixin_types": [],
}
build = DevopsTerraformBuild(project, config)
build.initialize_build_dir()
@task
def list_build_dir(project):
build = get_devops_build(project)
run(f"ls -la {build.build_path()}")
```

View file

@ -0,0 +1,21 @@
# Architecture
```mermaid
C4Context
title Architecture of dda-devops-build
Component(buildAndMixin, "Build and Mixin", "")
Component(app, "Application", "")
Component(dom, "Domain", "")
Component(infra, "Infrastructure", "")
Rel(buildAndMixin,app, "use")
Rel(buildAndMixin,dom, "use")
Rel(app, dom, "use")
Rel(app, infra, "use")
Rel(infra, dom, "use")
UpdateLayoutConfig($c4ShapeInRow="2", $c4BoundaryInRow="1")
```

View file

@ -0,0 +1,54 @@
# Devops Frontend with application and domain
```mermaid
classDiagram
class DevopsBuild {
__init__(project, config)
do_sth(project)
}
class ProjectRepository {
get_devops(project): Devops
set_devops(project, build)
}
class Devops
class BuildService {
do_sth(project, build)
}
DevopsBuild *-- BuildService
BuildService *-- ProjectRepository
DevopsBuild *-- ProjectRepository
```
In case of simple operations we will not need the BuildService in between.
## Init Sequence
```mermaid
sequenceDiagram
MyBuild ->> DevOpsBuild: create_config
MyBuild ->> DevOpsBuild: __init__(project, config)
activate DevOpsBuild
DevOpsBuild ->> Devops: __init__
DevOpsBuild ->> ProjectRepository: set_devops(build)
deactivate DevOpsBuild
```
## do_sth Sequence
```mermaid
sequenceDiagram
MyBuild ->> DevOpsBuild: do_sth(project)
activate DevOpsBuild
DevOpsBuild ->> BuildService: do_sth(project)
activate BuildService
BuildService ->> ProjectRepository: get_devops
BuildService ->> BuildService: do_some_complicated_stuff(build)
deactivate BuildService
deactivate DevOpsBuild
```

143
doc/architecture/Domain.md Normal file
View file

@ -0,0 +1,143 @@
# Domain
```mermaid
classDiagram
class Devops {
<<AggregateRoot>>
name
module
stage
build_dir_name
project_root_path
}
class Image {
image_dockerhub_user
image_dockerhub_password
image_publish_tag
image_build_dir_name
image_use_package_common_files
image_build_commons_path
image_build_commons_dir_name
}
class C4k {
c4k_executabel_name
c4k_mixin_config
c4k_mixin_auth
c4k_grafana_cloud_user
c4k_grafana_cloud_password
}
class ProvsK3s {
k3s_provision_user
k3s_letsencrypt_email
k3s_letsencrypt_endpoint
k3s_enable_echo
k3s_app_filename_to_provision
}
class TerraformDomain {
tf_additional_vars
tf_additional_resources_from_package
tf_output_json_name
tf_use_workspace
tf_use_package_common_files
tf_build_commons_path
tf_commons_dir_name
tf_debug_print_terraform_command
tf_additional_tfvar_files
tf_terraform_semantic_version
}
class Digitalocean {
do_api_key
do_spaces_access_key
do_spaces_secret_key
do_as_backend
do_account_name
do_endpoint
do_bucket
do_bucket_key
do_region
}
class Hetzner {
hetzner_api_key
}
class Aws {
aws_account_name
}
class DnsRecord {
fqdn
ipv4
ipv6
}
class Release {
release_type
release_main_branch
release_current_branch
version
}
class Credentials {
<<AggregateRoot>>
}
class CredentialMapping {
name
gopass_path
gopass_field
gopass_type()
name_for_input()
name_for_environment ()
}
class BuildFile {
<<AggregateRoot>>
file_path [id]
content
build_file_type()
get_version()
set_version(version)
}
class Version {
to_string()
create_major()
create_minor()
create_patch()
create_bump(snapshot_suffix)
}
Devops *-- "0..1" Image: specialized_builds
Devops *-- "0..1" C4k: specialized_builds
Devops *-- "0..1" ProvsK3s: specialized_builds
Devops *-- "0..1" TerraformDomain: specialized_builds
Devops *-- "0..1" Release: mixins
TerraformDomain *-- "0..1" Digitalocean: providers
TerraformDomain *-- "0..1" Hetzner: providers
TerraformDomain *-- "0..1" Aws: providers
Release o-- "0..1" BuildFile: primary_build_file
Release o-- "0..n" BuildFile: secondary_build_files
BuildFile *-- "1" Version
C4k *-- DnsRecord: dns_record
ProvsK3s *-- DnsRecord: provision_dns
Credentials *-- "0..n" CredentialMapping: mappings[name]
Credentials *-- "0..n" CredentialMapping: default_mappings
```
# Infrastructure
```mermaid
classDiagram
class ProjectRepository {
get_devops(project): Devops
set_devops(project, build)
}
```

View file

@ -0,0 +1,81 @@
# Architecture of ReleaseMixin
[Link to live editor](https://mermaid.live/edit#pako:eNrtV99vmzAQ_lcsPzUSrUjIj8JDpUqb9jSpaqs9TJGQg6_UGxhmTNes6v8-E0MCGIem6-MiBYnz3Xd3vu8ulxccZRRwgAv4VQKP4BMjsSApUp81r54CIolEvDmbup6D6sdEn21KllB0fnWFbiEBUsBX9sx4gMKQcSbD8CwX2Q9l76Ao4w8sDh9YArUtiSR7IhI6pge3HWnl4QuT1zlrYU8sirXgfpvDLeRZwWQmti07DVRb50RIFrGccNk2tEB_A1GwjA_Cmhm2sWvL4yEP4ho-neEMHZQSxsONIDx6tGenD4BTo7oO8tV3NVLaXIBChVBoaVOFPc7MrfixcNDCtRXoRkPU8jsQTyyCVsbGbfQZMwigdQaPbHccg-zn0WflQb2TzEF8jHIt_FCqM5uTrl3HUfeo0wgVeqJQChlGWZoyacBrTS2kMCi2u2mdBOjQly2c8eh7kAPtUyXxpMVG-Ia6PjfENuwkI3TXjw3ymy0VhVTJ3mza4m5l46A6ozBhhZwY92bJ6yi1zPaort1psNSALYUALrv9v29zs2p972Pn4wgfMAJ-CyYhJJzWjO5352h3B6jpNxupOmOwfunWMhKgFEMD6FgPjIVnHXlGxo27OpzJO8baiS2oQ9iRveFtIQXj8ajvZhIRSs_erNydVeYP0QeyZ1Om-QnUqdSHyn06d2xI_4nzYcRpXeWRdWBPr4GFpyLaym3xzLbySBLvLjovi8fDRDqyqt6T-JrTG6Vu3XE6S-g-E5vhu3wNh61hrI7GIU9BakqnQ-GZVEnSf4zh5HSaICrDAfbYbG0du7v6HqmqJ3ZwCkLt4FT9m3qpJGssHyGFNVadhSkRP9d4zV-VHilldrflEQ4eSFKAg8ucKg_1X6-e9DOt2m0vVLv89yxTSlKU-hUHL_gZB-fT6cWlt_Td5dzz1ACdL30Hb5Xcny4uZjN_7k2ni5k39y5fHfxnBzG7cD135fnzxdJfrObu6vUveFPSvA)
```mermaid
sequenceDiagram
rect rgb(103, 103, 10)
build ->> ReleaseMixin: __init__(project, config_file)
activate ReleaseMixin
ReleaseMixin ->> GitApi: __init__()
ReleaseMixin ->> ReleaseTypeRepository: __init__(GitApi)
participant ReleaseType
ReleaseMixin ->> VersionRepository: __init__(config_file)
participant Version
ReleaseMixin ->> ReleaseRepository: __init__(VersionRepository, ReleaseTypeRepository, main_branch)
participant Release
end
rect rgb(10, 90, 7)
build ->> ReleaseMixin: prepare_release()
rect rgb(20, 105, 50)
ReleaseMixin ->> PrepareReleaseService: __init__(ReleaseRepository)
activate PrepareReleaseService
PrepareReleaseService ->> ReleaseRepository: get_release()
activate ReleaseRepository
ReleaseRepository ->> ReleaseTypeRepository: get_release_type()
activate ReleaseTypeRepository
ReleaseTypeRepository ->> GitApi: get_latest_commit()
activate GitApi
deactivate GitApi
ReleaseTypeRepository ->> ReleaseType:
deactivate ReleaseTypeRepository
ReleaseRepository ->> VersionRepository: get_version()
activate VersionRepository
VersionRepository ->> VersionRepository: load_file()
VersionRepository ->> VersionRepository: parse_file()
VersionRepository ->> Version: __init__(file, version_list)
deactivate VersionRepository
ReleaseRepository ->> Release: __init__(ReleaseType, Version, current_branch)
end
deactivate ReleaseRepository
activate ReleaseRepository
deactivate ReleaseRepository
rect rgb(20, 105, 50)
ReleaseMixin ->> PrepareReleaseService: write_and_commit_release()
PrepareReleaseService ->> Release: release_version()
activate Release
Release ->> Version: create_release_version()
deactivate Release
PrepareReleaseService ->> PrepareReleaseService: __write_and_commit_version(Version)
PrepareReleaseService ->> ReleaseRepository:
ReleaseRepository ->> VersionRepository: write_file(version_string)
PrepareReleaseService ->> GitApi: add()
PrepareReleaseService ->> GitApi: commit()
end
rect rgb(20, 105, 50)
ReleaseMixin ->> PrepareReleaseService: write_and_commit_bump()
PrepareReleaseService ->> Release: bump_version()
activate Release
Release ->> Version: create_bump_version()
deactivate Release
PrepareReleaseService ->> PrepareReleaseService: __write_and_commit_version(Version)
PrepareReleaseService ->> ReleaseRepository:
ReleaseRepository ->> VersionRepository: write_file(version_string)
PrepareReleaseService ->> GitApi: add()
PrepareReleaseService ->> GitApi: commit()
deactivate PrepareReleaseService
end
end
rect rgb(120, 70, 50)
build ->> ReleaseMixin: tag_and_push_release()
ReleaseMixin ->> TagAndPushReleaseService: __init__(GitApi)
activate TagAndPushReleaseService
ReleaseMixin ->> TagAndPushReleaseService: tag_and_push_release()
TagAndPushReleaseService ->> TagAndPushReleaseService: tag_release()
TagAndPushReleaseService ->> GitApi: tag_annotated()
TagAndPushReleaseService ->> TagAndPushReleaseService: push_release()
TagAndPushReleaseService ->> GitApi: push()
deactivate TagAndPushReleaseService
deactivate ReleaseMixin
end
```

View file

@ -1,6 +1,15 @@
# For local development
```
python3 -m venv ~/.venv --upgrade
source ~/.venv/bin/activate
pip3 install --upgrade pybuilder deprecation dda_python_terraform boto3
pip3 install --upgrade -r dev_requirements.txt
pip3 install --upgrade -r requirements.txt
```
# For testing a dev version
```
pyb publish upload
pip3 install --upgrade ddadevops --pre
```
```

View file

@ -5,7 +5,7 @@ name = 'example-project'
MODULE = 'docker-module'
PROJECT_ROOT_PATH = '../../..'
class MyBuild(DevopsDockerBuild):
class MyBuild(DevopsImageBuild):
pass
@init

View file

@ -1,33 +1,27 @@
from subprocess import run
from os import environ
from pybuilder.core import task, init
from ddadevops import *
import logging
name = 'clojure'
MODULE = 'docker'
PROJECT_ROOT_PATH = '../..'
name = "clojure"
MODULE = "image"
PROJECT_ROOT_PATH = "../.."
class MyBuild(DevopsDockerBuild):
pass
@init
def initialize(project):
project.build_depends_on('ddadevops>=0.13.0')
stage = 'notused'
dockerhub_user = environ.get('DOCKERHUB_USER')
if not dockerhub_user:
dockerhub_user = gopass_field_from_path('meissa/web/docker.com', 'login')
dockerhub_password = environ.get('DOCKERHUB_PASSWORD')
if not dockerhub_password:
dockerhub_password = gopass_password_from_path('meissa/web/docker.com')
tag = environ.get('CI_COMMIT_TAG')
if not tag:
tag = get_tag_from_latest_commit()
config = create_devops_docker_build_config(
stage, PROJECT_ROOT_PATH, MODULE, dockerhub_user, dockerhub_password, docker_publish_tag=tag)
build = MyBuild(project, config)
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
}
project.build_depends_on("ddadevops>=4.0.0-dev")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()
@ -36,16 +30,19 @@ def image(project):
build = get_devops_build(project)
build.image()
@task
def drun(project):
build = get_devops_build(project)
build.drun()
@task
def test(project):
build = get_devops_build(project)
build.test()
@task
def publish(project):
build = get_devops_build(project)

View file

@ -1,4 +1,4 @@
FROM domaindrivenarchitecture/clojure
FROM clojure
RUN apt update
RUN apt -yqq --no-install-recommends --yes install curl default-jre-headless
@ -8,4 +8,4 @@ RUN curl -L -o /tmp/serverspec.jar \
COPY serverspec.edn /tmp/serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v

View file

@ -1,33 +1,27 @@
from subprocess import run
from os import environ
from pybuilder.core import task, init
from ddadevops import *
import logging
name = 'devops-build'
MODULE = 'docker'
PROJECT_ROOT_PATH = '../..'
name = "devops-build"
MODULE = "image"
PROJECT_ROOT_PATH = "../.."
class MyBuild(DevopsDockerBuild):
pass
@init
def initialize(project):
project.build_depends_on('ddadevops>=0.13.0')
stage = 'notused'
dockerhub_user = environ.get('DOCKERHUB_USER')
if not dockerhub_user:
dockerhub_user = gopass_field_from_path('meissa/web/docker.com', 'login')
dockerhub_password = environ.get('DOCKERHUB_PASSWORD')
if not dockerhub_password:
dockerhub_password = gopass_password_from_path('meissa/web/docker.com')
tag = environ.get('CI_COMMIT_TAG')
if not tag:
tag = get_tag_from_latest_commit()
config = create_devops_docker_build_config(
stage, PROJECT_ROOT_PATH, MODULE, dockerhub_user, dockerhub_password, docker_publish_tag=tag)
build = MyBuild(project, config)
input = {
"name": name,
"module": MODULE,
"stage": "notused",
"project_root_path": PROJECT_ROOT_PATH,
"build_types": ["IMAGE"],
"mixin_types": [],
}
project.build_depends_on("ddadevops>=4.0.0-dev")
build = DevopsImageBuild(project, input)
build.initialize_build_dir()
@ -36,16 +30,19 @@ def image(project):
build = get_devops_build(project)
build.image()
@task
def drun(project):
build = get_devops_build(project)
build.drun()
@task
def test(project):
build = get_devops_build(project)
build.test()
@task
def publish(project):
build = get_devops_build(project)

View file

@ -1,7 +1,7 @@
FROM docker:latest
RUN set -eux;
RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo;
RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash;
RUN python3 -m pip install -U pip;
RUN ln -s /usr/bin/python3 /usr/bin/python
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 mfa;
#RUN ln -s /usr/bin/python3 /usr/bin/python
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 mfa;

View file

@ -1,4 +1,4 @@
FROM domaindrivenarchitecture/devops-build
FROM devops-build
RUN apk update
RUN apk add curl openjdk8
@ -8,4 +8,4 @@ RUN curl -L -o /tmp/serverspec.jar \
COPY serverspec.edn /tmp/serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v

View file

@ -4,4 +4,5 @@ setuptools
dda-python-terraform==2.0.1
packaging
boto3
pyyaml
pyyaml
inflection

View file

@ -4,19 +4,15 @@ terraform, dda-pallet, aws & hetzner-cloud.
"""
from .python_util import execute
from .provs_k3s_mixin import ProvsK3sMixin, add_provs_k3s_mixin_config
from .aws_rds_pg_mixin import AwsRdsPgMixin, add_aws_rds_pg_mixin_config
from .aws_mfa_mixin import AwsMfaMixin, add_aws_mfa_mixin_config
from .aws_backend_properties_mixin import AwsBackendPropertiesMixin, add_aws_backend_properties_mixin_config
from .c4k_mixin import C4kMixin, add_c4k_mixin_config
from .exoscale_mixin import ExoscaleMixin, add_exoscale_mixin_config
from .digitalocean_backend_properties_mixin import DigitaloceanBackendPropertiesMixin, add_digitalocean_backend_properties_mixin_config
from .digitalocean_terraform_build import DigitaloceanTerraformBuild, create_digitalocean_terraform_build_config
from .hetzner_mixin import HetznerMixin, add_hetzner_mixin_config
from .devops_docker_build import DevopsDockerBuild, create_devops_docker_build_config
from .devops_terraform_build import DevopsTerraformBuild, create_devops_terraform_build_config
from .devops_build import DevopsBuild, create_devops_build_config, get_devops_build, get_tag_from_latest_commit
from .domain import DnsRecord, BuildType, MixinType, ReleaseType, ProviderType
from .provs_k3s_build import ProvsK3sBuild
# from .aws_mfa_mixin import AwsMfaMixin, add_aws_mfa_mixin_config
from .c4k_build import C4kBuild
from .devops_image_build import DevopsImageBuild
from .devops_terraform_build import DevopsTerraformBuild
from .devops_build import DevopsBuild, get_devops_build
from .credential import gopass_password_from_path, gopass_field_from_path
from .release_mixin import ReleaseMixin
__version__ = "${version}"

View file

@ -0,0 +1,3 @@
from .image_build_service import ImageBuildService
from .release_mixin_services import ReleaseService
from .terraform_service import TerraformService

View file

@ -0,0 +1,67 @@
from pathlib import Path
from ..domain import Devops, BuildType
from ..infrastructure import FileApi, ResourceApi, ImageApi
class ImageBuildService:
    """Application service orchestrating Docker image build, run, test,
    and Dockerhub login/publish steps for a Devops aggregate that carries
    an IMAGE specialized build."""

    def __init__(self, file_api: FileApi, resource_api: ResourceApi, image_api: ImageApi):
        # Injected infrastructure adapters; prod() supplies the default wiring.
        self.file_api = file_api
        self.resource_api = resource_api
        self.image_api = image_api

    @classmethod
    def prod(cls):
        """Factory returning a service wired with the production adapters."""
        return cls(
            FileApi(),
            ResourceApi(),
            ImageApi(),
        )

    def __copy_build_resource_file_from_package__(self, resource_name, devops: Devops):
        # Read a resource bundled with the installed package and materialize
        # it under the build directory, mirroring its relative path.
        data = self.resource_api.read_resource(f"src/main/resources/docker/{resource_name}")
        self.file_api.write_data_to_file(
            Path(f"{devops.build_path()}/{resource_name}"), data
        )

    def __copy_build_resources_from_package__(self, devops: Devops):
        # Currently only the shared shell helper functions are shipped.
        self.__copy_build_resource_file_from_package__(
            "image/resources/install_functions.sh", devops
        )

    def __copy_build_resources_from_dir__(self, devops: Devops):
        # Alternative to the package route: copy a user-provided commons dir.
        image = devops.specialized_builds[BuildType.IMAGE]
        self.file_api.cp_force(
            image.build_commons_path(), devops.build_path()
        )

    def initialize_build_dir(self, devops: Devops):
        """Populate the build dir with commons resources (from the package or
        a local dir, per image_use_package_common_files) plus the project's
        image/ and test/ directories."""
        image = devops.specialized_builds[BuildType.IMAGE]
        build_path = devops.build_path()
        self.file_api.clean_dir(f"{build_path}/image/resources")
        if image.image_use_package_common_files:
            self.__copy_build_resources_from_package__(devops)
        else:
            self.__copy_build_resources_from_dir__(devops)
        self.file_api.cp_recursive("image", build_path)
        self.file_api.cp_recursive("test", build_path)

    def image(self, devops: Devops):
        """Build the image named after the Devops aggregate."""
        self.image_api.image(devops.name, devops.build_path())

    def drun(self, devops: Devops):
        """Run the built image interactively (docker run)."""
        self.image_api.drun(devops.name)

    def dockerhub_login(self, devops: Devops):
        """Log in to Dockerhub with the credentials of the IMAGE build."""
        image = devops.specialized_builds[BuildType.IMAGE]
        self.image_api.dockerhub_login(
            image.image_dockerhub_user, image.image_dockerhub_password
        )

    def dockerhub_publish(self, devops: Devops):
        """Tag and push the image to Dockerhub.

        NOTE(review): reads `image.image_tag`, while the domain documentation
        lists the attribute as `image_publish_tag` — confirm which name the
        Image domain object actually exposes.
        """
        image = devops.specialized_builds[BuildType.IMAGE]
        self.image_api.dockerhub_publish(
            devops.name, image.image_dockerhub_user, image.image_tag
        )

    def test(self, devops: Devops):
        """Build the test image (serverspec) for the aggregate."""
        self.image_api.test(devops.name, devops.build_path())

View file

@ -0,0 +1,61 @@
from typing import List
from pathlib import Path
from ..infrastructure import GitApi, BuildFileRepository
from ..domain import Version, Release, ReleaseType
class ReleaseService:
def __init__(self, git_api: GitApi, build_file_repository: BuildFileRepository):
self.git_api = git_api
self.build_file_repository = build_file_repository
@classmethod
def prod(cls, base_dir: str):
return cls(
GitApi(),
BuildFileRepository(base_dir),
)
def prepare_release(self, release: Release):
match release.release_type:
case ReleaseType.MAJOR:
version = release.version.create_major()
case ReleaseType.MINOR:
version = release.version.create_minor()
case ReleaseType.PATCH:
version = release.version.create_patch()
case ReleaseType.NONE:
return
message = f"release: {version.to_string()}"
self.__set_version_and_commit__(version, release.build_files(), message)
def tag_bump_and_push_release(self, release: Release):
match release.release_type:
case ReleaseType.MAJOR:
release_version = release.version.create_major()
case ReleaseType.MINOR:
release_version = release.version.create_minor()
case ReleaseType.PATCH:
release_version = release.version.create_patch()
case ReleaseType.NONE:
return
bump_version = release_version.create_bump("SNAPSHOT")
release_message = f"release: {release_version.to_string()}"
bump_message = f"bump version to: {bump_version.to_string()}"
self.git_api.tag_annotated(release_version.to_string(), release_message, 0)
self.__set_version_and_commit__(
bump_version,
release.build_files(),
bump_message,
)
self.git_api.push()
def __set_version_and_commit__(
self, version: Version, build_file_ids: List[str], message: str
):
for build_file_id in build_file_ids:
build_file = self.build_file_repository.get(Path(build_file_id))
build_file.set_version(version)
self.build_file_repository.write(build_file)
self.git_api.add_file(build_file.file_path)
self.git_api.commit(message)

View file

@ -0,0 +1,221 @@
from pathlib import Path
from dda_python_terraform import Terraform, IsFlagged
from packaging import version
from ..domain import Devops, BuildType
from ..infrastructure import FileApi, ResourceApi, TerraformApi
# TODO: mv more fkt to Terraform_api ?
class TerraformService:
    """Orchestrates terraform runs (init/plan/apply/refresh/destroy/import)
    for a Devops build: prepares the build dir, wires vars and backend
    configuration, and captures json output.

    Fixes: ``read_output`` was annotated ``-> map`` (the builtin ``map`` is
    not a mapping type — the json file deserializes to a dict), and
    ``__init_client__`` used a bare ``except:`` which would also swallow
    KeyboardInterrupt/SystemExit.
    """

    def __init__(
        self, file_api: FileApi, resource_api: ResourceApi, terraform_api: TerraformApi
    ):
        self.file_api = file_api
        self.resource_api = resource_api
        self.terraform_api = terraform_api

    @classmethod
    def prod(cls):
        """Factory wiring the production infrastructure adapters."""
        return cls(
            FileApi(),
            ResourceApi(),
            TerraformApi(),
        )

    def initialize_build_dir(self, devops: Devops):
        """Populate the build dir with common resources, project tf files and
        (for local-state backends) the current terraform state."""
        terraform = devops.specialized_builds[BuildType.TERRAFORM]
        if terraform.tf_use_package_common_files:
            self.__copy_build_resources_from_package__(devops)
        else:
            self.__copy_build_resources_from_dir__(devops)
        self.__copy_local_state__(devops)
        # project files are copied last so they override the common resources
        self.file_api.cp("*.tf", devops.build_path(), check=False)
        self.file_api.cp("*.properties", devops.build_path(), check=False)
        self.file_api.cp("*.tfvars", devops.build_path(), check=False)
        self.file_api.cp_recursive("scripts", devops.build_path(), check=False)

    def read_output(self, devops: Devops) -> dict:
        """Return the json output of the last terraform run as a dict."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        return self.file_api.read_json_fro_file(
            Path(f"{devops.build_path()}{terraform_domain.tf_output_json_name}")
        )

    def plan(self, devops: Devops, fail_on_diff=False):
        """Run terraform plan.

        With fail_on_diff, plan runs with detailed exitcode and a pending
        change (exit code 2) raises a RuntimeError.
        """
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        if fail_on_diff:
            detailed_exitcode = IsFlagged
        else:
            detailed_exitcode = None
        terraform = self.__init_client__(devops)
        return_code, _, stderr = terraform.plan(
            detailed_exitcode=detailed_exitcode,
            capture_output=False,
            raise_on_error=False,
            var=terraform_domain.project_vars(),
            var_file=terraform_domain.tf_additional_tfvar_files,
        )
        self.__print_terraform_command__(terraform, devops)
        # detailed exitcode semantics: 0 = clean, 2 = diff present, else error
        if return_code not in (0, 2):
            raise RuntimeError(return_code, "terraform error:", stderr)
        if return_code == 2:
            raise RuntimeError(return_code, "diff in config found:", stderr)

    def apply(self, devops: Devops, auto_approve=False):
        """Run terraform apply and persist the json output.

        auto_approve skips the interactive confirmation; the flag api differs
        between terraform < 1.0 (skip_plan) and >= 1.0 (auto_approve).
        """
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        if auto_approve:
            auto_approve_flag = IsFlagged
        else:
            auto_approve_flag = None
        terraform = self.__init_client__(devops)
        if version.parse(
            terraform_domain.tf_terraform_semantic_version
        ) >= version.parse("1.0.0"):
            return_code, _, stderr = terraform.apply(
                capture_output=False,
                raise_on_error=True,
                auto_approve=auto_approve_flag,
                var=terraform_domain.project_vars(),
                var_file=terraform_domain.tf_additional_tfvar_files,
            )
        else:
            return_code, _, stderr = terraform.apply(
                capture_output=False,
                raise_on_error=True,
                skip_plan=auto_approve,
                var=terraform_domain.project_vars(),
                var_file=terraform_domain.tf_additional_tfvar_files,
            )
        self.__print_terraform_command__(terraform, devops)
        if return_code > 0:
            raise RuntimeError(return_code, "terraform error:", stderr)
        self.__write_output__(terraform, devops)

    def refresh(self, devops: Devops):
        """Run terraform refresh and persist the json output."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        terraform = self.__init_client__(devops)
        return_code, _, stderr = terraform.refresh(
            var=terraform_domain.project_vars(),
            var_file=terraform_domain.tf_additional_tfvar_files,
        )
        self.__print_terraform_command__(terraform, devops)
        if return_code > 0:
            raise RuntimeError(return_code, "terraform error:", stderr)
        self.__write_output__(terraform, devops)

    def destroy(self, devops: Devops, auto_approve=False):
        """Run terraform destroy.

        The flag api differs between terraform < 1.0 (force) and >= 1.0
        (auto_approve).
        """
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        if auto_approve:
            auto_approve_flag = IsFlagged
        else:
            auto_approve_flag = None
        terraform = self.__init_client__(devops)
        if version.parse(
            terraform_domain.tf_terraform_semantic_version
        ) >= version.parse("1.0.0"):
            return_code, _, stderr = terraform.destroy(
                capture_output=False,
                raise_on_error=True,
                auto_approve=auto_approve_flag,
                var=terraform_domain.project_vars(),
                var_file=terraform_domain.tf_additional_tfvar_files,
            )
        else:
            return_code, _, stderr = terraform.destroy(
                capture_output=False,
                raise_on_error=True,
                force=auto_approve_flag,
                var=terraform_domain.project_vars(),
                var_file=terraform_domain.tf_additional_tfvar_files,
            )
        self.__print_terraform_command__(terraform, devops)
        if return_code > 0:
            raise RuntimeError(return_code, "terraform error:", stderr)

    def tf_import(
        self,
        devops: Devops,
        tf_import_name,
        tf_import_resource,
    ):
        """Import an existing resource into the terraform state."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        terraform = self.__init_client__(devops)
        return_code, _, stderr = terraform.import_cmd(
            tf_import_name,
            tf_import_resource,
            capture_output=False,
            raise_on_error=True,
            var=terraform_domain.project_vars(),
            var_file=terraform_domain.tf_additional_tfvar_files,
        )
        self.__print_terraform_command__(terraform, devops)
        if return_code > 0:
            raise RuntimeError(return_code, "terraform error:", stderr)

    def post_build(self, devops: Devops):
        """Copy a local terraform state back out of the build dir."""
        self.__rescue_local_state__(devops)

    def __copy_build_resource_file_from_package__(self, resource_name, devops: Devops):
        """Materialize one packaged terraform resource into the build dir."""
        data = self.resource_api.read_resource(
            f"src/main/resources/terraform/{resource_name}"
        )
        self.file_api.write_data_to_file(
            Path(f"{devops.build_path()}/{resource_name}"), data
        )

    def __copy_build_resources_from_package__(self, devops: Devops):
        """Copy every packaged common terraform resource into the build dir."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        for resource in terraform_domain.resources_from_package():
            self.__copy_build_resource_file_from_package__(resource, devops)

    def __copy_build_resources_from_dir__(self, devops: Devops):
        """Copy common terraform resources from a local commons dir instead."""
        terraform = devops.specialized_builds[BuildType.TERRAFORM]
        self.file_api.cp_force(
            f"{terraform.build_commons_path()}/*", devops.build_path()
        )

    def __print_terraform_command__(self, terraform: Terraform, devops: Devops):
        """Echo the last terraform invocation when debug printing is enabled."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        if terraform_domain.tf_debug_print_terraform_command:
            output = f"cd {devops.build_path()} && {terraform.latest_cmd()}"
            print(output)

    def __copy_local_state__(self, devops: Devops):
        """For local-state backends, seed the build dir with the current state."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        if terraform_domain.is_local_state():
            self.file_api.cp("terraform.tfstate", devops.build_path(), check=False)

    def __rescue_local_state__(self, devops: Devops):
        """For local-state backends, copy the state back to the project root."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        if terraform_domain.is_local_state():
            self.file_api.cp(
                f"{devops.build_path()}/terraform.tfstate", ".", check=False
            )

    def __init_client__(self, devops: Devops):
        """Create and init a Terraform client for the build dir, selecting
        (or creating) the stage workspace when workspaces are enabled."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        terraform = Terraform(
            working_dir=devops.build_path(),
            terraform_semantic_version=terraform_domain.tf_terraform_semantic_version,
        )
        if terraform_domain.is_local_state():
            terraform.init()
        else:
            terraform.init(backend_config=terraform_domain.backend_config())
        self.__print_terraform_command__(terraform, devops)
        if terraform_domain.tf_use_workspace:
            try:
                terraform.workspace("select", devops.stage)
                self.__print_terraform_command__(terraform, devops)
            except Exception:  # select fails when the workspace does not exist yet
                terraform.workspace("new", devops.stage)
                self.__print_terraform_command__(terraform, devops)
        return terraform

    def __write_output__(self, terraform, devops: Devops):
        """Dump `terraform output -json` into the configured output file."""
        terraform_domain = devops.specialized_builds[BuildType.TERRAFORM]
        result = terraform.output(json=IsFlagged)
        self.__print_terraform_command__(terraform, devops)
        self.file_api.write_json_to_file(
            Path(f"{devops.build_path()}{terraform_domain.tf_output_json_name}"), result
        )

View file

@ -1,49 +0,0 @@
from dda_python_terraform import Terraform
from .devops_terraform_build import DevopsTerraformBuild
def add_aws_backend_properties_mixin_config(config, account_name):
    """Register the AwsBackendPropertiesMixin settings on a build config.

    Mutates *config* in place and returns it for chaining."""
    config['AwsBackendPropertiesMixin'] = {'account_name': account_name}
    return config
class AwsBackendPropertiesMixin(DevopsTerraformBuild):
    """Terraform build mixin that configures an AWS backend from a
    backend.<account>.<stage>.properties file.

    Fix: ``init_client`` used a bare ``except:`` which would also swallow
    KeyboardInterrupt/SystemExit; narrowed to ``except Exception``.
    """

    def __init__(self, project, config):
        super().__init__(project, config)
        aws_mixin_config = config['AwsBackendPropertiesMixin']
        self.account_name = aws_mixin_config['account_name']
        # backend settings live in backend.<account>.<stage>.properties
        self.backend_config = "backend." + self.account_name + "." + self.stage + ".properties"
        self.additional_tfvar_files.append(self.backend_config)

    def project_vars(self):
        """Expose the account name as an additional terraform var."""
        ret = super().project_vars()
        ret.update({'account_name': self.account_name})
        return ret

    def copy_build_resources_from_package(self):
        """Add the packaged AWS provider/backend terraform snippets."""
        super().copy_build_resources_from_package()
        self.copy_build_resource_file_from_package('provider_registry.tf')
        self.copy_build_resource_file_from_package('aws_provider.tf')
        self.copy_build_resource_file_from_package(
            'aws_backend_properties_vars.tf')
        self.copy_build_resource_file_from_package(
            'aws_backend_with_properties.tf')

    def copy_local_state(self):
        """Remote backend: no local state to copy into the build dir."""

    def rescue_local_state(self):
        """Remote backend: no local state to rescue from the build dir."""

    def init_client(self):
        """Return a Terraform client initialized against the properties
        backend, with the stage workspace selected (created on demand)."""
        terraform = Terraform(working_dir=self.build_path(),
                              terraform_semantic_version=self.terraform_semantic_version)
        terraform.init(backend_config=self.backend_config)
        self.print_terraform_command(terraform)
        if self.use_workspace:
            try:
                terraform.workspace('select', self.stage)
                self.print_terraform_command(terraform)
            except Exception:  # select fails when the workspace does not exist yet
                terraform.workspace('new', self.stage)
                self.print_terraform_command(terraform)
        return terraform

View file

@ -1,81 +1,81 @@
from boto3 import Session
from .python_util import execute
from .aws_backend_properties_mixin import AwsBackendPropertiesMixin
# from boto3 import Session
# from .python_util import execute
# from .aws_backend_properties_mixin import AwsBackendPropertiesMixin
def add_aws_mfa_mixin_config(config, account_id, region,
                             mfa_role='developer', mfa_account_prefix='',
                             mfa_login_account_suffix='main'):
    """Register the AwsMfaMixin settings on a build config.

    Mutates *config* in place and returns it for chaining."""
    config['AwsMfaMixin'] = {
        'account_id': account_id,
        'region': region,
        'mfa_role': mfa_role,
        'mfa_account_prefix': mfa_account_prefix,
        'mfa_login_account_suffix': mfa_login_account_suffix,
    }
    return config
# def add_aws_mfa_mixin_config(config, account_id, region,
# mfa_role='developer', mfa_account_prefix='',
# mfa_login_account_suffix='main'):
# config.update({'AwsMfaMixin':
# {'account_id': account_id,
# 'region': region,
# 'mfa_role': mfa_role,
# 'mfa_account_prefix': mfa_account_prefix,
# 'mfa_login_account_suffix': mfa_login_account_suffix}})
# return config
class AwsMfaMixin(AwsBackendPropertiesMixin):
    """Terraform mixin that obtains MFA-protected AWS session credentials
    via STS assume-role and stores them on the target aws cli profile.

    Account metadata (username, account id) is read from ~/.aws/accounts;
    one-time tokens come from the external `mfa` cli.
    """

    def __init__(self, project, config):
        super().__init__(project, config)
        project.build_depends_on('boto3')
        project.build_depends_on('mfa')
        mfa_config = config['AwsMfaMixin']
        self.account_id = mfa_config['account_id']
        self.region = mfa_config['region']
        self.mfa_role = mfa_config['mfa_role']
        self.mfa_account_prefix = mfa_config['mfa_account_prefix']
        self.mfa_login_account_suffix = mfa_config['mfa_login_account_suffix']

    def project_vars(self):
        """Extend the parent terraform vars with the MFA settings."""
        result = super().project_vars()
        result.update({'account_name': self.account_name,
                       'account_id': self.account_id,
                       'region': self.region,
                       'mfa_role': self.mfa_role,
                       'mfa_account_prefix': self.mfa_account_prefix,
                       'mfa_login_account_suffix': self.mfa_login_account_suffix})
        return result

    def get_username_from_account(self, p_account_name):
        """Read the username of *p_account_name* from ~/.aws/accounts."""
        return execute(r'cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
                       r'\]" | grep username | awk -F= \'{print $2}\'', shell=True)

    def get_account_id_from_account(self, p_account_name):
        """Read the account id of *p_account_name* from ~/.aws/accounts."""
        return execute(r'cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
                       r'\]" | grep account | awk -F= \'{print $2}\'', shell=True)

    def get_mfa(self, mfa_path='aws'):
        """Fetch a one-time token from the external `mfa` cli."""
        return execute('mfa otp ' + mfa_path, shell=True)

    def write_aws_config(self, to_profile, key, secret):
        """Persist one credential value on the given aws cli profile."""
        execute('aws configure --profile ' + to_profile +
                ' set ' + key + ' ' + secret, shell=True)

    def get_mfa_session(self):
        """Assume the configured role using an MFA token and write the
        temporary credentials to the target account's aws cli profile."""
        from_account_name = self.mfa_account_prefix + self.mfa_login_account_suffix
        from_account_id = self.get_account_id_from_account(from_account_name)
        to_account_name = self.mfa_account_prefix + self.account_name
        to_account_id = self.get_account_id_from_account(to_account_name)
        login_id = self.get_username_from_account(from_account_name)
        mfa_token = self.get_mfa()
        session = Session(profile_name=from_account_name)
        sts_client = session.client('sts')
        response = sts_client.assume_role(
            RoleArn='arn:aws:iam::' + to_account_id + ':role/' + self.mfa_role,
            RoleSessionName=to_account_id + '-' + self.account_name + '-' + self.mfa_role,
            SerialNumber='arn:aws:iam::' + from_account_id + ':mfa/' + login_id,
            TokenCode=mfa_token
        )
        credentials = response['Credentials']
        self.write_aws_config(to_account_name, 'aws_access_key_id',
                              credentials['AccessKeyId'])
        self.write_aws_config(to_account_name, 'aws_secret_access_key',
                              credentials['SecretAccessKey'])
        self.write_aws_config(to_account_name, 'aws_session_token',
                              credentials['SessionToken'])
        print('got token')

View file

@ -1,117 +0,0 @@
from .python_util import execute
from .credential import gopass_password_from_path, gopass_field_from_path
from .devops_build import DevopsBuild
def add_aws_rds_pg_mixin_config(config, rds_host_name, db_name,
                                rds_resolve_dns=False,
                                db_port='5432'):
    """Register the AwsRdsPgMixin settings on a build config.

    Mutates *config* in place and returns it for chaining."""
    config['AwsRdsPgMixin'] = {
        'rds_host_name': rds_host_name,
        'db_name': db_name,
        'rds_resolve_dns': rds_resolve_dns,
        'db_port': db_port,
    }
    return config
class AwsRdsPgMixin(DevopsBuild):
    """Build mixin for administering users on an AWS RDS PostgreSQL
    instance through the psql cli; credentials come from gopass.

    Fix: in ``change_owned_objects`` the result of ``alter_stmt.strip()``
    was discarded (str.strip returns a new string), so the emptiness check
    could pass on whitespace-only output.

    NOTE(review): user/password/sql values are interpolated into shell
    commands unescaped — only use with trusted, operator-supplied input.
    """

    def __init__(self, project, config):
        super().__init__(project, config)
        aws_rds_pg_mixin_config = config['AwsRdsPgMixin']
        self.rds_host_name = aws_rds_pg_mixin_config['rds_host_name']
        # when True, the endpoint is resolved to its first A record via dig
        self.rds_resolve_dns = aws_rds_pg_mixin_config['rds_resolve_dns']
        self.db_name = aws_rds_pg_mixin_config['db_name']
        self.db_port = aws_rds_pg_mixin_config['db_port']

    def execute_pg_rds_sql(self, user, password, sql):
        """Run one sql statement via psql (sslmode=require) and return stdout."""
        if self.rds_resolve_dns:
            host_cmd = "dig " + self.rds_host_name + " +short | head -n1"
            host = execute(host_cmd, shell=True)
        else:
            host = self.rds_host_name
        cmd = "PGUSER=" + user + " PGPASSWORD=" + password + \
            " psql --dbname=" + self.db_name + " --host=" + host + " --port=" + self.db_port + \
            " --set=sslmode=require -Atc \"" + sql + "\""
        result = execute(cmd, shell=True)
        print("PSQL: ", host, result.rstrip())
        return result

    def alter_db_user_password(self, gopass_path):
        """Rotate a user's password from the 'old-password' field to the
        current gopass password."""
        user_name = gopass_field_from_path(gopass_path, 'user')
        user_old_password = gopass_field_from_path(gopass_path, 'old-password')
        user_new_password = gopass_password_from_path(gopass_path)
        self.execute_pg_rds_sql(user_name, user_old_password,
                                "ALTER ROLE " + user_name + " WITH PASSWORD '" + user_new_password + "';")
        print("changed password:", user_name)

    def add_new_user(self, gopass_path_superuser, gopass_path_new_user, group_role):
        """Create a login role (credentials from gopass) and grant it *group_role*."""
        superuser_name = gopass_field_from_path(gopass_path_superuser, 'user')
        superuser_password = gopass_password_from_path(gopass_path_superuser)
        new_user_name = gopass_field_from_path(gopass_path_new_user, 'user')
        new_user_password = gopass_password_from_path(gopass_path_new_user)
        self.execute_pg_rds_sql(superuser_name, superuser_password,
                                "CREATE ROLE " + new_user_name + " WITH LOGIN INHERIT PASSWORD '" + new_user_password + "';" +
                                "GRANT " + group_role + " TO " + new_user_name + ";")
        print("created user:", new_user_name)

    def deactivate_user(self, gopass_path_superuser, to_deactivate_user_name):
        """Set NOLOGIN on a user after verifying it owns no objects and has
        no open JDBC connections; raises AssertionError otherwise."""
        superuser_name = gopass_field_from_path(gopass_path_superuser, 'user')
        superuser_password = gopass_password_from_path(gopass_path_superuser)
        owned_by_wrong_user = self.execute_pg_rds_sql(superuser_name, superuser_password,
                                                      "SELECT count(*) FROM pg_class c, pg_user u WHERE c.relowner = u.usesysid " +
                                                      "and u.usename='" + to_deactivate_user_name + "';")
        if int(owned_by_wrong_user) > 0:
            owned_objects = self.execute_pg_rds_sql(superuser_name, superuser_password,
                                                    "SELECT c.relname FROM pg_class c, pg_user u WHERE c.relowner = u.usesysid " +
                                                    "and u.usename='" + to_deactivate_user_name + "';")
            raise AssertionError(
                "There are still objects owned by the user to be deactivated:", owned_objects, to_deactivate_user_name)
        connections = self.execute_pg_rds_sql(superuser_name, superuser_password,
                                              "SELECT count(*) FROM pg_stat_activity WHERE application_name = " +
                                              "'PostgreSQL JDBC Driver' AND usename = '" + to_deactivate_user_name + "';")
        if int(connections) > 0:
            raise AssertionError("User is still connected.")
        self.execute_pg_rds_sql(superuser_name, superuser_password,
                                "ALTER ROLE " + to_deactivate_user_name + " WITH NOLOGIN NOCREATEROLE;")
        print('deactivated user:', to_deactivate_user_name)

    def change_owned_objects(self, gopass_path_superuser, to_deactivate_user_name, owner):
        """Interactively reassign tables/indexes/sequences owned by a user
        to *owner* (prompts before applying the generated statements)."""
        superuser_name = gopass_field_from_path(gopass_path_superuser, 'user')
        superuser_password = gopass_password_from_path(gopass_path_superuser)
        # generate the ALTER ... OWNER TO statements for all owned relations
        alter_objects = f"""SELECT 'ALTER TABLE ' || c.relname || ' OWNER TO {owner};'
            FROM pg_class c, pg_user u
            WHERE c.relowner = u.usesysid
            and c.relkind = 'r'
            and u.usename='{to_deactivate_user_name}'
            UNION
            SELECT 'ALTER INDEX ' || c.relname || ' OWNER TO {owner};'
            FROM pg_class c, pg_user u
            WHERE c.relowner = u.usesysid
            and c.relkind = 'i'
            and c.relname not like 'pg_toast%'
            and u.usename='{to_deactivate_user_name}'
            UNION
            SELECT 'ALTER SEQUENCE ' || c.relname || ' OWNER TO {owner};'
            FROM pg_class c, pg_user u
            WHERE c.relowner = u.usesysid
            and c.relkind = 'S'
            and u.usename='{to_deactivate_user_name}';"""
        alter_stmt = self.execute_pg_rds_sql(superuser_name, superuser_password, alter_objects)
        # strip() returns a new string; the original discarded this result
        alter_stmt = alter_stmt.strip()
        if alter_stmt != '':
            print('apply alter statements? \n', alter_stmt)
            proceed = input('\n[y/n] \n')
            if proceed == 'y':
                self.execute_pg_rds_sql(superuser_name, superuser_password, alter_stmt)

View file

@ -0,0 +1,38 @@
from .domain import BuildType, DnsRecord
from .devops_build import DevopsBuild
from .infrastructure import ExecutionApi
class C4kBuild(DevopsBuild):
    """Build step that renders convention-4-kubernetes (c4k) config and auth
    yaml files and applies the generated command."""

    def __init__(self, project, config):
        super().__init__(project, config)
        self.execution_api = ExecutionApi()
        build = self.devops_repo.get_devops(self.project)
        if BuildType.C4K not in build.specialized_builds:
            raise ValueError("C4kBuild requires BuildType.C4K")

    def update_runtime_config(self, dns_record: DnsRecord):
        """Propagate the resolved DnsRecord into the c4k specialized build."""
        super().update_runtime_config(dns_record)
        build = self.devops_repo.get_devops(self.project)
        build.specialized_builds[BuildType.C4K].update_runtime_config(dns_record)
        self.devops_repo.set_devops(self.project, build)

    def write_c4k_config(self):
        """Render the c4k configuration yaml into the build dir."""
        build = self.devops_repo.get_devops(self.project)
        target = build.build_path() + "/out_c4k_config.yaml"
        self.file_api.write_yaml_to_file(
            target, build.specialized_builds[BuildType.C4K].config()
        )

    def write_c4k_auth(self):
        """Render the c4k auth yaml into the build dir.

        NOTE(review): the legacy C4kMixin chmod'ed the auth file to 0o600;
        no chmod happens here — confirm FileApi covers file permissions."""
        build = self.devops_repo.get_devops(self.project)
        target = build.build_path() + "/out_c4k_auth.yaml"
        self.file_api.write_yaml_to_file(
            target, build.specialized_builds[BuildType.C4K].auth()
        )

    def c4k_apply(self, dry_run=False):
        """Execute the generated c4k command; with dry_run only print it."""
        build = self.devops_repo.get_devops(self.project)
        return self.execution_api.execute(
            build.specialized_builds[BuildType.C4K].command(build), dry_run
        )

View file

@ -1,63 +0,0 @@
from os import chmod
import yaml
from .python_util import execute
from .devops_build import DevopsBuild
from .credential import gopass_field_from_path, gopass_password_from_path
def add_c4k_mixin_config(config,
                         c4k_module_name,
                         c4k_config_dict,
                         c4k_auth_dict,
                         grafana_cloud_user=None,
                         grafana_cloud_password=None,
                         grafana_cloud_url='https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push'):
    """Wire grafana-cloud monitoring into the c4k config/auth dicts and
    register everything under 'C4kMixin' on the build config.

    Credentials default to gopass lookups when not supplied explicitly.
    All three dicts are mutated in place; *config* is returned."""
    if not grafana_cloud_user:
        grafana_cloud_user = gopass_field_from_path(
            'server/meissa/grafana-cloud', 'grafana-cloud-user')
    if not grafana_cloud_password:
        grafana_cloud_password = gopass_password_from_path(
            'server/meissa/grafana-cloud')
    c4k_auth_dict['mon-auth'] = {
        'grafana-cloud-user': grafana_cloud_user,
        'grafana-cloud-password': grafana_cloud_password,
    }
    c4k_config_dict['mon-cfg'] = {
        'grafana-cloud-url': grafana_cloud_url,
    }
    config['C4kMixin'] = {'Config': c4k_config_dict,
                          'Auth': c4k_auth_dict,
                          'Name': c4k_module_name}
    return config
class C4kMixin(DevopsBuild):
    """Legacy mixin that renders c4k config/auth yaml files and runs the
    module's standalone jar to generate kubernetes manifests."""

    def __init__(self, project, config):
        super().__init__(project, config)
        self.c4k_mixin_config = config['C4kMixin']['Config']
        self.c4k_mixin_auth = config['C4kMixin']['Auth']
        self.c4k_module_name = config['C4kMixin']['Name']
        # enrich the monitoring section with the cluster coordinates
        tmp = self.c4k_mixin_config['mon-cfg']
        tmp.update({'cluster-name': self.c4k_module_name,
                    'cluster-stage': self.stage})
        self.c4k_mixin_config.update({'mon-cfg': tmp})

    def write_c4k_config(self):
        """Dump the c4k config as yaml into the build dir."""
        # 'fqdn' must have been put on the build stack before this is called
        fqdn = self.get('fqdn')
        self.c4k_mixin_config.update({'fqdn': fqdn})
        with open(self.build_path() + '/out_c4k_config.yaml', 'w', encoding="utf-8") as output_file:
            yaml.dump(self.c4k_mixin_config, output_file)

    def write_c4k_auth(self):
        """Dump the c4k auth as yaml into the build dir."""
        with open(self.build_path() + '/out_c4k_auth.yaml', 'w', encoding="utf-8") as output_file:
            yaml.dump(self.c4k_mixin_auth, output_file)
        # the auth file contains secrets -> restrict to owner read/write
        chmod(self.build_path() + '/out_c4k_auth.yaml', 0o600)

    def c4k_apply(self, dry_run=False):
        """Pipe config+auth through the module's standalone jar to produce
        the manifest; with dry_run only print the command."""
        cmd = f'c4k-{self.c4k_module_name}-standalone.jar {self.build_path()}/out_c4k_config.yaml {self.build_path()}/out_c4k_auth.yaml > {self.build_path()}/out_{self.c4k_module_name}.yaml'
        output = ''
        if dry_run:
            print(cmd)
        else:
            output = execute(cmd, True)
            print(output)
        return output

View file

@ -1,15 +1,18 @@
from .python_util import execute
import deprecation
from .infrastructure import CredentialsApi
def gopass_field_from_path(path, field):
    """Resolve one field of a gopass secret; returns None when path or
    field is missing/falsy."""
    if not (path and field):
        return None
    print('get field for: ' + path + ', ' + field)
    return execute(['gopass', 'show', path, field])
def gopass_password_from_path(path):
    """Resolve the password of a gopass secret; returns None for a falsy path."""
    if not path:
        return None
    print('get password for: ' + path)
    return execute(['gopass', 'show', '--password', path])
@deprecation.deprecated(
    deprecated_in="3.2", details="use infrastructure.CredentialsApi instead"
)
def gopass_field_from_path(path, field):
    """Deprecated shim delegating to CredentialsApi.gopass_field_from_path."""
    return CredentialsApi().gopass_field_from_path(path, field)
@deprecation.deprecated(
    deprecated_in="3.2", details="use infrastructure.CredentialsApi instead"
)
def gopass_password_from_path(path):
    """Deprecated shim delegating to CredentialsApi.gopass_password_from_path."""
    return CredentialsApi().gopass_password_from_path(path)

View file

@ -1,58 +1,32 @@
from subprocess import run, CalledProcessError
from .python_util import filter_none
from .domain import InitService, DnsRecord
from .infrastructure import DevopsRepository, FileApi
def create_devops_build_config(stage, project_root_path, module,
                               build_dir_name='target'):
    """Assemble the plain-dict configuration for a DevopsBuild."""
    return {
        'stage': stage,
        'project_root_path': project_root_path,
        'module': module,
        'build_dir_name': build_dir_name,
    }
def get_devops_build(project):
    # Fetch the DevopsBuild instance previously registered on the pybuilder project.
    return project.get_property('devops_build')
    # NOTE(review): unreachable duplicate lookup below (merge-diff artifact) —
    # the refactored DevopsBuild registers itself under the key "build";
    # confirm which property key is current before relying on this accessor.
    return project.get_property("build")
def get_tag_from_latest_commit():
    """Return the tag pointing exactly at HEAD, or None if there is none."""
    try:
        completed = run('git describe --abbrev=0 --tags --exact-match', shell=True,
                        capture_output=True, check=True)
    except CalledProcessError:
        # HEAD carries no exact tag
        return None
    return completed.stdout.decode('UTF-8').rstrip()
class DevopsBuild:
    """Base pybuilder build wrapper.

    NOTE(review): this span interleaves the pre- and post-refactoring
    implementations (merge-diff artifact): two ``__init__`` definitions are
    present (the second one wins at class-creation time) and several
    methods carry unreachable statements after their first ``return``.
    Resolve toward one variant before shipping.
    """

    def __init__(self, project, config):
        # legacy constructor (shadowed by the dict-based one below)
        #deprecate stage
        self.stage = config['stage']
        self.project_root_path = config['project_root_path']
        self.module = config['module']
        self.build_dir_name = config['build_dir_name']
        self.stack = {}

    def __init__(self, project, inp: dict):
        # current constructor: delegates initialization to the domain layer
        self.project = project
        project.set_property('devops_build', self)
        self.file_api = FileApi()
        self.init_service = InitService.prod(project.basedir)
        self.devops_repo = DevopsRepository()
        devops = self.init_service.initialize(inp)
        self.devops_repo.set_devops(self.project, devops)
        self.project.set_property("build", self)

    def name(self):
        # legacy variant executes; the lines after it are unreachable (diff artifact)
        return self.project.name
        devops = self.devops_repo.get_devops(self.project)
        return devops.name

    def build_path(self):
        # legacy variant executes; the lines after the return are unreachable
        mylist = [self.project_root_path,
                  self.build_dir_name,
                  self.name(),
                  self.module]
        return '/'.join(filter_none(mylist))
        devops = self.devops_repo.get_devops(self.project)
        return devops.build_path()

    def initialize_build_dir(self):
        # NOTE(review): legacy rm/mkdir AND the new clean_dir are both present
        # here — as merged, all four statements run; confirm intended variant.
        run('rm -rf ' + self.build_path(), shell=True, check=True)
        run('mkdir -p ' + self.build_path(), shell=True, check=True)
        devops = self.devops_repo.get_devops(self.project)
        self.file_api.clean_dir(devops.build_path())

    def put(self, key, value):
        # legacy key/value stack on the build (requires the legacy __init__)
        self.stack[key] = value

    def get(self, key):
        # raises KeyError when the key was never put
        return self.stack[key]

    def get_keys(self, keys):
        # bulk variant of get(): returns a dict of the requested keys
        result = {}
        for key in keys:
            result[key] = self.get(key)
        return result

    def update_runtime_config(self, dns_record: DnsRecord):
        # hook for subclasses; the base implementation does nothing
        pass

View file

@ -1,96 +0,0 @@
import sys
from subprocess import run
from pkg_resources import resource_string
from .python_util import filter_none
from .devops_build import DevopsBuild, create_devops_build_config
def create_devops_docker_build_config(stage,
                                      project_root_path,
                                      module,
                                      dockerhub_user,
                                      dockerhub_password,
                                      build_dir_name='target',
                                      use_package_common_files=True,
                                      build_commons_path=None,
                                      docker_build_commons_dir_name='docker',
                                      docker_publish_tag=None):
    """Assemble the config dict for DevopsDockerBuild on top of the base
    build config."""
    config = create_devops_build_config(
        stage, project_root_path, module, build_dir_name)
    config.update({
        'dockerhub_user': dockerhub_user,
        'dockerhub_password': dockerhub_password,
        'use_package_common_files': use_package_common_files,
        'docker_build_commons_dir_name': docker_build_commons_dir_name,
        'build_commons_path': build_commons_path,
        'docker_publish_tag': docker_publish_tag,
    })
    return config
class DevopsDockerBuild(DevopsBuild):
    """Legacy DevopsBuild flavour that builds, tests and publishes a
    docker image via shell calls to the docker cli."""

    def __init__(self, project, config):
        super().__init__(project, config)
        # NOTE(review): this dependency looks copy-pasted from the terraform
        # build — confirm 'dda-python-terraform' is really needed here.
        project.build_depends_on('dda-python-terraform')
        self.dockerhub_user = config['dockerhub_user']
        self.dockerhub_password = config['dockerhub_password']
        self.use_package_common_files = config['use_package_common_files']
        self.build_commons_path = config['build_commons_path']
        self.docker_build_commons_dir_name = config['docker_build_commons_dir_name']
        self.docker_publish_tag = config['docker_publish_tag']

    def docker_build_commons_path(self):
        # joined path of the local commons dir, always with a trailing slash
        mylist = [self.build_commons_path,
                  self.docker_build_commons_dir_name]
        return '/'.join(filter_none(mylist)) + '/'

    def copy_build_resource_file_from_package(self, name):
        # materialize one packaged resource file into the build dir
        run('mkdir -p ' + self.build_path() + '/image/resources', shell=True, check=True)
        my_data = resource_string(
            __name__, "src/main/resources/docker/" + name)
        with open(self.build_path() + '/' + name, "w", encoding="utf-8") as output_file:
            output_file.write(my_data.decode(sys.stdout.encoding))

    def copy_build_resources_from_package(self):
        # currently only the shared install helper script
        self.copy_build_resource_file_from_package(
            'image/resources/install_functions.sh')

    def copy_build_resources_from_dir(self):
        # alternative source: copy everything from a local commons dir
        run('cp -f ' + self.docker_build_commons_path() +
            '* ' + self.build_path(), shell=True, check=True)

    def initialize_build_dir(self):
        """Stage common resources plus the project's image/ and test/ dirs."""
        super().initialize_build_dir()
        if self.use_package_common_files:
            self.copy_build_resources_from_package()
        else:
            self.copy_build_resources_from_dir()
        run('cp -r image ' + self.build_path(), shell=True, check=True)
        run('cp -r test ' + self.build_path(), shell=True, check=True)

    def image(self):
        """docker build the main image from the staged build dir."""
        run('docker build -t ' + self.name() +
            ' --file ' + self.build_path() + '/image/Dockerfile '
            + self.build_path() + '/image', shell=True, check=True)

    def drun(self):
        """Start an interactive shell in the built image (port 8080 exposed)."""
        run('docker run --expose 8080 -it --entrypoint="" ' +
            self.name() + ' /bin/bash', shell=True, check=True)

    def dockerhub_login(self):
        # NOTE(review): the password is passed on the command line and may
        # end up in the shell history / process list.
        run('docker login --username ' + self.dockerhub_user +
            ' --password ' + self.dockerhub_password, shell=True, check=True)

    def dockerhub_publish(self):
        """Tag and push :<docker_publish_tag> (when set) and :latest."""
        if self.docker_publish_tag is not None:
            run('docker tag ' + self.name() + ' ' + self.dockerhub_user +
                '/' + self.name() + ':' + self.docker_publish_tag, shell=True, check=True)
            run('docker push ' + self.dockerhub_user +
                '/' + self.name() + ':' + self.docker_publish_tag, shell=True, check=True)
        run('docker tag ' + self.name() + ' ' + self.dockerhub_user +
            '/' + self.name() + ':latest', shell=True, check=True)
        run('docker push ' + self.dockerhub_user +
            '/' + self.name() + ':latest', shell=True, check=True)

    def test(self):
        """docker build the test image from the staged test/ dir."""
        run('docker build -t ' + self.name() + '-test ' +
            '--file ' + self.build_path() + '/test/Dockerfile '
            + self.build_path() + '/test', shell=True, check=True)

View file

@ -0,0 +1,37 @@
from .domain import BuildType
from .application import ImageBuildService
from .devops_build import DevopsBuild
class DevopsImageBuild(DevopsBuild):
    """DevopsBuild flavour that delegates all docker image tasks to the
    ImageBuildService application service."""

    def __init__(self, project, inp: dict):
        super().__init__(project, inp)
        self.image_build_service = ImageBuildService.prod()
        build = self.devops_repo.get_devops(self.project)
        if BuildType.IMAGE not in build.specialized_builds:
            raise ValueError("ImageBuild requires BuildType.IMAGE")

    def __current_devops__(self):
        """Fetch the Devops aggregate registered for this project."""
        return self.devops_repo.get_devops(self.project)

    def initialize_build_dir(self):
        """Clean the build dir, then stage the image build resources."""
        super().initialize_build_dir()
        self.image_build_service.initialize_build_dir(self.__current_devops__())

    def image(self):
        """Build the docker image."""
        self.image_build_service.image(self.__current_devops__())

    def drun(self):
        """Run the built image interactively."""
        self.image_build_service.drun(self.__current_devops__())

    def dockerhub_login(self):
        """Log in to Docker Hub."""
        self.image_build_service.dockerhub_login(self.__current_devops__())

    def dockerhub_publish(self):
        """Push the built image to Docker Hub."""
        self.image_build_service.dockerhub_publish(self.__current_devops__())

    def test(self):
        """Build and run the test image."""
        self.image_build_service.test(self.__current_devops__())

View file

@ -1,221 +1,63 @@
import sys
from os import chmod
from json import load, dumps
from subprocess import run
from packaging import version
from pkg_resources import resource_string
from dda_python_terraform import Terraform, IsFlagged
from .python_util import filter_none
from .devops_build import DevopsBuild, create_devops_build_config
from .devops_build import DevopsBuild
from .application import TerraformService
def create_devops_terraform_build_config(stage,
                                         project_root_path,
                                         module,
                                         additional_vars,
                                         build_dir_name='target',
                                         output_json_name=None,
                                         use_workspace=True,
                                         use_package_common_files=True,
                                         build_commons_path=None,
                                         terraform_build_commons_dir_name='terraform',
                                         debug_print_terraform_command=False,
                                         additional_tfvar_files=None,
                                         terraform_semantic_version="1.0.8"):
    """Assemble the config dict for a DevopsTerraformBuild on top of the base build config."""
    ret = create_devops_build_config(
        stage, project_root_path, module, build_dir_name)
    ret.update({
        'additional_vars': additional_vars,
        # default output file name is derived from the module name
        'output_json_name': output_json_name or 'out_' + module + '.json',
        'use_workspace': use_workspace,
        'use_package_common_files': use_package_common_files,
        'build_commons_path': build_commons_path,
        'terraform_build_commons_dir_name': terraform_build_commons_dir_name,
        'debug_print_terraform_command': debug_print_terraform_command,
        'additional_tfvar_files': additional_tfvar_files or [],
        'terraform_semantic_version': terraform_semantic_version,
    })
    return ret
class DevopsTerraformBuild(DevopsBuild):
def __init__(self, project, config):
super().__init__(project, config)
project.build_depends_on('dda-python-terraform')
self.additional_vars = config['additional_vars']
self.output_json_name = config['output_json_name']
self.use_workspace = config['use_workspace']
self.use_package_common_files = config['use_package_common_files']
self.build_commons_path = config['build_commons_path']
self.terraform_build_commons_dir_name = config['terraform_build_commons_dir_name']
self.debug_print_terraform_command = config['debug_print_terraform_command']
self.additional_tfvar_files = config['additional_tfvar_files']
self.terraform_semantic_version = config['terraform_semantic_version']
    def terraform_build_commons_path(self):
        """Return the path to the shared terraform files, always '/'-terminated.

        None segments (e.g. an unset build_commons_path) are dropped.
        """
        mylist = [self.build_commons_path,
                  self.terraform_build_commons_dir_name]
        return '/'.join(filter_none(mylist)) + '/'
def project_vars(self):
ret = {'stage': self.stage}
if self.module:
ret['module'] = self.module
if self.additional_vars:
ret.update(self.additional_vars)
return ret
def copy_build_resource_file_from_package(self, name):
my_data = resource_string(
__name__, "src/main/resources/terraform/" + name)
with open(self.build_path() + '/' + name, "w", encoding="utf-8") as output_file:
output_file.write(my_data.decode(sys.stdout.encoding))
    def copy_build_resources_from_package(self):
        """Copy the default packaged terraform templates into the build dir."""
        self.copy_build_resource_file_from_package('versions.tf')
        self.copy_build_resource_file_from_package('terraform_build_vars.tf')
def copy_build_resources_from_dir(self):
run('cp -f ' + self.terraform_build_commons_path() +
'* ' + self.build_path(), shell=False, check=False)
def copy_local_state(self):
run('cp terraform.tfstate ' + self.build_path(), shell=False, check=False)
def rescue_local_state(self):
run('cp ' + self.build_path() + '/terraform.tfstate .', shell=False, check=False)
inp = config.copy()
inp["name"] = project.name
inp["module"] = config.get("module")
inp["stage"] = config.get("stage")
inp["project_root_path"] = config.get("project_root_path")
inp["build_types"] = config.get("build_types", [])
inp["mixin_types"] = config.get("mixin_types", [])
super().__init__(project, inp)
project.build_depends_on("dda-python-terraform")
self.teraform_service = TerraformService.prod()
def initialize_build_dir(self):
super().initialize_build_dir()
if self.use_package_common_files:
self.copy_build_resources_from_package()
else:
self.copy_build_resources_from_dir()
self.copy_local_state()
run('cp *.tf ' + self.build_path(), shell=True, check=False)
run('cp *.properties ' + self.build_path(), shell=True, check=False)
run('cp *.tfvars ' + self.build_path(), shell=True, check=False)
run('cp -r scripts ' + self.build_path(), shell=True, check=False)
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.initialize_build_dir(devops)
def post_build(self):
self.rescue_local_state()
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.post_build(devops)
def init_client(self):
terraform = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
terraform.init()
self.print_terraform_command(terraform)
if self.use_workspace:
try:
terraform.workspace('select', self.stage)
self.print_terraform_command(terraform)
except:
terraform.workspace('new', self.stage)
self.print_terraform_command(terraform)
return terraform
    def write_output(self, terraform):
        """Persist `terraform output -json` so later build steps can read it."""
        result = terraform.output(json=IsFlagged)
        self.print_terraform_command(terraform)
        # NOTE(review): there is no '/' between build_path() and the file name
        # (read_output_json concatenates the same way, so read/write agree) -
        # confirm the file is intended to land at this exact location.
        with open(self.build_path() + self.output_json_name, "w", encoding="utf-8") as output_file:
            output_file.write(dumps(result))
        # output may contain secrets - restrict the file to owner read/write
        chmod(self.build_path() + self.output_json_name, 0o600)
def read_output_json(self):
with open(self.build_path() + self.output_json_name, 'r', encoding="utf-8") as file:
return load(file)
def read_output_json(self) -> map:
devops = self.devops_repo.get_devops(self.project)
return self.teraform_service.read_output(devops)
def plan(self):
terraform = self.init_client()
return_code, _, stderr = terraform.plan(detailed_exitcode=None, capture_output=False, raise_on_error=False,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.plan(devops)
self.post_build()
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)
def plan_fail_on_diff(self):
terraform = self.init_client()
return_code, _, stderr = terraform.plan(detailed_exitcode=IsFlagged, capture_output=False, raise_on_error=False,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.plan(devops, fail_on_diff=True)
self.post_build()
self.print_terraform_command(terraform)
if return_code not in (0, 2):
raise Exception(return_code, "terraform error:", stderr)
if return_code == 2:
raise Exception(return_code, "diff in config found:", stderr)
def apply(self, auto_approve=False):
terraform = self.init_client()
if auto_approve:
auto_approve_flag = IsFlagged
else:
auto_approve_flag = None
if version.parse(self.terraform_semantic_version) >= version.parse("1.0.0"):
return_code, _, stderr = terraform.apply(capture_output=False, raise_on_error=True,
auto_approve=auto_approve_flag,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
else:
return_code, _, stderr = terraform.apply(capture_output=False, raise_on_error=True,
skip_plan=auto_approve,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.write_output(terraform)
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.apply(devops, auto_approve=auto_approve)
self.post_build()
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)
def refresh(self):
terraform = self.init_client()
return_code, _, stderr = terraform.refresh(
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.write_output(terraform)
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.refresh(devops)
self.post_build()
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)
def destroy(self, auto_approve=False):
terraform = self.init_client()
if auto_approve:
auto_approve_flag = IsFlagged
else:
auto_approve_flag = None
if version.parse(self.terraform_semantic_version) >= version.parse("1.0.0"):
return_code, _, stderr = terraform.destroy(capture_output=False, raise_on_error=True,
auto_approve=auto_approve_flag,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
else:
return_code, _, stderr = terraform.destroy(capture_output=False, raise_on_error=True,
force=auto_approve_flag,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.destroy(devops, auto_approve=auto_approve)
self.post_build()
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)
def tf_import(self, tf_import_name, tf_import_resource,):
terraform = self.init_client()
return_code, _, stderr = terraform.import_cmd(tf_import_name, tf_import_resource,
capture_output=False, raise_on_error=True,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
def tf_import(
self,
tf_import_name,
tf_import_resource,
):
devops = self.devops_repo.get_devops(self.project)
self.teraform_service.tf_import(devops, tf_import_name, tf_import_resource)
self.post_build()
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)
def print_terraform_command(self, terraform):
if self.debug_print_terraform_command:
output = 'cd ' + self.build_path() + ' && ' + terraform.latest_cmd()
print(output)

View file

@ -1,72 +0,0 @@
from dda_python_terraform import Terraform
from .digitalocean_terraform_build import DigitaloceanTerraformBuild
def add_digitalocean_backend_properties_mixin_config(config,
                                                     account_name,
                                                     endpoint,
                                                     bucket,
                                                     key,
                                                     region='eu-central-1'):
    """Attach the DigitaloceanBackendPropertiesMixin settings to *config* and return it."""
    mixin_settings = {
        'account_name': account_name,
        'endpoint': endpoint,
        'bucket': bucket,
        'key': key,
        'region': region,
    }
    config['DigitaloceanBackendPropertiesMixin'] = mixin_settings
    return config
class DigitaloceanBackendPropertiesMixin(DigitaloceanTerraformBuild):
    """Terraform build storing its state in DO spaces (S3-compatible backend)."""

    def __init__(self, project, config):
        super().__init__(project, config)
        do_mixin_config = config['DigitaloceanBackendPropertiesMixin']
        self.account_name = do_mixin_config['account_name']
        self.endpoint = do_mixin_config['endpoint']
        self.bucket = do_mixin_config['bucket']
        # state objects are namespaced per account: <account_name>/<key>
        self.key = do_mixin_config['account_name'] + \
            '/' + do_mixin_config['key']
        self.region = do_mixin_config['region']
        self.backend_config = {
            'access_key': self.do_spaces_access_id,
            'secret_key': self.do_spaces_secret_key,
            'endpoint': self.endpoint,
            'bucket': self.bucket,
            'key': self.key,
            'region': self.region}

    def project_vars(self):
        """Additionally expose the backend settings as terraform input vars."""
        ret = super().project_vars()
        ret.update({'account_name': self.account_name})
        ret.update({'endpoint': self.endpoint})
        ret.update({'bucket': self.bucket})
        ret.update({'key': self.key})
        ret.update({'region': self.region})
        return ret

    def copy_build_resources_from_package(self):
        """Additionally provision the DO backend .tf templates."""
        super().copy_build_resources_from_package()
        self.copy_build_resource_file_from_package(
            'do_backend_properties_vars.tf')
        self.copy_build_resource_file_from_package(
            'do_backend_with_properties.tf')

    def copy_local_state(self):
        """No-op: state lives in the remote backend."""
        pass

    def rescue_local_state(self):
        """No-op: state lives in the remote backend."""
        pass

    def init_client(self):
        """Init terraform against the remote backend and select the stage workspace."""
        terraform = Terraform(working_dir=self.build_path(),
                              terraform_semantic_version=self.terraform_semantic_version)
        terraform.init(backend_config=self.backend_config)
        self.print_terraform_command(terraform)
        if self.use_workspace:
            try:
                terraform.workspace('select', self.stage)
                self.print_terraform_command(terraform)
            except Exception:
                # workspace does not exist yet - create it. Was a bare except,
                # which also swallowed KeyboardInterrupt/SystemExit.
                terraform.workspace('new', self.stage)
                self.print_terraform_command(terraform)
        return terraform

View file

@ -1,63 +0,0 @@
from .devops_terraform_build import DevopsTerraformBuild, create_devops_terraform_build_config
def create_digitalocean_terraform_build_config(stage,
                                               project_root_path,
                                               module,
                                               additional_vars,
                                               do_api_key,
                                               do_spaces_access_id,
                                               do_spaces_secret_key,
                                               build_dir_name='target',
                                               output_json_name=None,
                                               use_workspace=True,
                                               use_package_common_files=True,
                                               build_commons_path=None,
                                               terraform_build_commons_dir_name='terraform',
                                               debug_print_terraform_command=False,
                                               additional_tfvar_files=None,
                                               terraform_semantic_version="1.0.8",
                                               ):
    """Build the config dict for a DigitaloceanTerraformBuild on top of the terraform config."""
    config = create_devops_terraform_build_config(stage,
                                                  project_root_path,
                                                  module,
                                                  additional_vars,
                                                  build_dir_name,
                                                  output_json_name,
                                                  use_workspace,
                                                  use_package_common_files,
                                                  build_commons_path,
                                                  terraform_build_commons_dir_name,
                                                  debug_print_terraform_command,
                                                  additional_tfvar_files or [],
                                                  terraform_semantic_version)
    config['DigitaloceanTerraformBuild'] = {
        'do_api_key': do_api_key,
        'do_spaces_access_id': do_spaces_access_id,
        'do_spaces_secret_key': do_spaces_secret_key,
    }
    return config
class DigitaloceanTerraformBuild(DevopsTerraformBuild):
    """Terraform build preconfigured with DigitalOcean provider credentials."""

    def __init__(self, project, config):
        super().__init__(project, config)
        do_mixin_config = config['DigitaloceanTerraformBuild']
        self.do_api_key = do_mixin_config['do_api_key']
        self.do_spaces_access_id = do_mixin_config['do_spaces_access_id']
        self.do_spaces_secret_key = do_mixin_config['do_spaces_secret_key']

    def project_vars(self):
        """Additionally expose the DO credentials as terraform input vars."""
        variables = super().project_vars()
        variables.update({
            'do_api_key': self.do_api_key,
            'do_spaces_access_id': self.do_spaces_access_id,
            'do_spaces_secret_key': self.do_spaces_secret_key,
        })
        return variables

    def copy_build_resources_from_package(self):
        """Additionally provision the DO provider .tf templates."""
        super().copy_build_resources_from_package()
        for template in ('provider_registry.tf', 'do_provider.tf', 'do_mixin_vars.tf'):
            self.copy_build_resource_file_from_package(template)

View file

@ -0,0 +1,23 @@
from .common import (
Validateable,
CredentialMappingDefault,
DnsRecord,
Devops,
BuildType,
MixinType,
ReleaseType,
ProviderType,
)
from .devops_factory import DevopsFactory
from .image import Image
from .c4k import C4k
from .terraform import TerraformDomain
from .provider_digitalocean import Digitalocean
from .provider_hetzner import Hetzner
from .provider_aws import Aws
from .provs_k3s import K3s
from .release import Release
from .credentials import Credentials, CredentialMapping, GopassType
from .version import Version
from .build_file import BuildFileType, BuildFile
from .init_service import InitService

View file

@ -0,0 +1,121 @@
import json
import re
from enum import Enum
from pathlib import Path
from .common import Validateable
from .version import Version
class BuildFileType(Enum):
    """Supported build-file formats, identified by their file suffix."""
    JS = ".json"  # package.json
    JAVA_GRADLE = ".gradle"  # build.gradle
    JAVA_CLOJURE = ".clj"  # project.clj
    PYTHON = ".py"  # build.py
class BuildFile(Validateable):
def __init__(self, file_path: Path, content: str):
self.file_path = file_path
self.content = content
def validate(self):
result = []
result += self.__validate_is_not_empty__("file_path")
result += self.__validate_is_not_empty__("content")
if not self.build_file_type():
result += [f"Suffix {self.file_path} is unknown."]
return result
def build_file_type(self):
if not self.file_path:
return None
config_file_type = self.file_path.suffix
match config_file_type:
case ".json":
result = BuildFileType.JS
case ".gradle":
result = BuildFileType.JAVA_GRADLE
case ".clj":
result = BuildFileType.JAVA_CLOJURE
case ".py":
result = BuildFileType.PYTHON
case _:
result = None
return result
def get_version(self) -> Version:
try:
match self.build_file_type():
case BuildFileType.JS:
version_str = json.loads(self.content)["version"]
case BuildFileType.JAVA_GRADLE:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
case BuildFileType.PYTHON:
# TODO: '\nversion = ' will not parse all ?!
version_line = re.search("\nversion = .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-dev)?[0-9]*", version_line_group
)
version_str = version_string.group()
case BuildFileType.JAVA_CLOJURE:
# TODO: unsure about the trailing '\n' !
version_line = re.search("\\(defproject .*\n", self.content)
version_line_group = version_line.group()
version_string = re.search(
"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?", version_line_group
)
version_str = version_string.group()
except:
raise Exception(f"Version not found in file {self.file_path}")
result = Version.from_str(version_str)
result.throw_if_invalid()
return result
def set_version(self, new_version: Version):
# TODO: How can we create regex-pattern constants to use them at both places?
try:
match self.build_file_type():
case BuildFileType.JS:
json_data = json.loads(self.content)
json_data["version"] = new_version.to_string()
self.content = json.dumps(json_data, indent=4)
case BuildFileType.JAVA_GRADLE:
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.PYTHON:
substitute = re.sub(
'\nversion = "[0-9]*\\.[0-9]*\\.[0-9]*(-dev)?[0-9]*"',
f'\nversion = "{new_version.to_string()}"',
self.content,
)
self.content = substitute
case BuildFileType.JAVA_CLOJURE:
# TODO: we should stick here on defproject instead of first line!
substitute = re.sub(
'"[0-9]*\\.[0-9]*\\.[0-9]*(-SNAPSHOT)?"',
f'"{new_version.to_string()}"',
self.content,
1,
)
self.content = substitute
except:
raise Exception(f"Version not found in file {self.file_path}")
def __eq__(self, other):
return other and self.file_path == other.file_path
def __hash__(self) -> int:
return self.file_path.__hash__()

View file

@ -0,0 +1,80 @@
from typing import List, Dict, Optional
from .common import (
Validateable,
CredentialMappingDefault,
DnsRecord,
Devops,
)
class C4k(Validateable, CredentialMappingDefault):
    """c4k build settings including grafana-cloud monitoring configuration."""

    def __init__(self, inp: dict):
        self.module = inp.get("module")
        self.stage = inp.get("stage")
        self.c4k_executable_name = inp.get("c4k_executable_name", inp.get("module"))
        self.c4k_config = inp.get("c4k_config", {})
        self.c4k_grafana_cloud_url = inp.get(
            "c4k_grafana_cloud_url",
            "https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push",
        )
        self.c4k_auth = inp.get("c4k_auth", {})
        self.c4k_grafana_cloud_user = inp.get("c4k_grafana_cloud_user")
        self.c4k_grafana_cloud_password = inp.get("c4k_grafana_cloud_password")
        # only known at runtime - see update_runtime_config
        self.dns_record: Optional[DnsRecord] = None

    def update_runtime_config(self, dns_record: DnsRecord):
        """Set the runtime dns record and re-validate the whole object."""
        self.dns_record = dns_record
        self.throw_if_invalid()

    def validate(self) -> List[str]:
        result = []
        result += self.__validate_is_not_empty__("module")
        result += self.__validate_is_not_empty__("stage")
        result += self.__validate_is_not_empty__("c4k_executable_name")
        result += self.__validate_is_not_empty__("c4k_grafana_cloud_user")
        result += self.__validate_is_not_empty__("c4k_grafana_cloud_password")
        if self.dns_record:
            result += self.dns_record.validate()
        return result

    def config(self):
        """Return the c4k config map including fqdn and monitoring settings."""
        if not self.dns_record:
            # fixed typo in the message (was: "dns_reqord was not set.")
            raise ValueError("dns_record was not set.")
        result = self.c4k_config.copy()
        result["fqdn"] = self.dns_record.fqdn
        result["mon-cfg"] = {
            "cluster-name": self.module,
            "cluster-stage": self.stage,
            "grafana-cloud-url": self.c4k_grafana_cloud_url,
        }
        return result

    def auth(self):
        """Return the c4k auth map including grafana-cloud credentials."""
        result = self.c4k_auth.copy()
        result["mon-auth"] = {
            "grafana-cloud-user": self.c4k_grafana_cloud_user,
            "grafana-cloud-password": self.c4k_grafana_cloud_password,
        }
        return result

    def command(self, devops: Devops):
        """Shell command generating the c4k deployment from config & auth files."""
        module = devops.module
        build_path = devops.build_path()
        config_path = f"{build_path}/out_c4k_config.yaml"
        auth_path = f"{build_path}/out_c4k_auth.yaml"
        output_path = f"{build_path}/out_{module}.yaml"
        return f"c4k-{self.c4k_executable_name}-standalone.jar {config_path} {auth_path} > {output_path}"

    @classmethod
    def get_mapping_default(cls) -> List[Dict[str, str]]:
        """Default gopass mapping for the grafana-cloud credentials."""
        return [
            {
                "gopass_path": "server/meissa/grafana-cloud",
                "gopass_field": "grafana-cloud-user",
                "name": "c4k_grafana_cloud_user",
            },
            {
                "gopass_path": "server/meissa/grafana-cloud",
                "name": "c4k_grafana_cloud_password",
            },
        ]

View file

@ -0,0 +1,113 @@
from enum import Enum
from typing import List, Dict
def filter_none(list_to_filter):
    """Return *list_to_filter* without its None entries, order preserved."""
    return [item for item in list_to_filter if item is not None]
class BuildType(Enum):
    """Specialized build aspects a Devops project can enable."""
    IMAGE = 0
    C4K = 1
    K3S = 2
    TERRAFORM = 3
class ProviderType(Enum):
    """Supported terraform cloud providers."""
    DIGITALOCEAN = 0
    HETZNER = 1
    AWS = 2
class MixinType(Enum):
    """Cross-cutting build mixins."""
    RELEASE = 0
class ReleaseType(Enum):
    """Semver release kinds; numeric values order MAJOR > MINOR > PATCH.

    NONE deliberately carries the value None as a 'no release' sentinel.
    """
    MAJOR = 3
    MINOR = 2
    PATCH = 1
    NONE = None
class Validateable:
    """Base class for validatable value objects.

    Subclasses override validate() to return a list of human-readable issues;
    an empty list means the object is valid.
    """

    def __validate_is_not_none__(self, field_name: str) -> List[str]:
        """Return a one-element issue list when the field is None, else []."""
        value = self.__dict__[field_name]
        if value is None:
            return [f"Field '{field_name}' must not be None."]
        return []

    def __validate_is_not_empty__(self, field_name: str) -> List[str]:
        """Like __validate_is_not_none__, additionally rejecting '' and []."""
        result = self.__validate_is_not_none__(field_name)
        if len(result) == 0:
            value = self.__dict__[field_name]
            # isinstance instead of `type(...) is`: also accepts str/list subclasses
            if isinstance(value, str) and value == "":
                result += [f"Field '{field_name}' must not be empty."]
            elif isinstance(value, list) and len(value) == 0:
                result += [f"Field '{field_name}' must not be empty."]
        return result

    def validate(self) -> List[str]:
        """Override in subclasses; the base object has nothing to validate."""
        return []

    def is_valid(self) -> bool:
        return len(self.validate()) < 1

    def throw_if_invalid(self):
        """Raise ValueError listing all issues when the object is invalid."""
        if not self.is_valid():
            issues = "\n".join(self.validate())
            raise ValueError(f"Invalid Validateable: {issues}")
class CredentialMappingDefault:
    """Mixin declaring a (by default empty) set of default credential mappings."""

    @classmethod
    def get_mapping_default(cls) -> List[Dict[str, str]]:
        """Subclasses override this to declare their default gopass mappings."""
        return []
class DnsRecord(Validateable):
    """A DNS record: fqdn plus at least one of ipv4/ipv6."""

    def __init__(self, fqdn, ipv4=None, ipv6=None):
        self.fqdn = fqdn
        self.ipv4 = ipv4
        self.ipv6 = ipv6

    def validate(self) -> List[str]:
        issues = self.__validate_is_not_empty__("fqdn")
        # De Morgan: neither address present
        if not (self.ipv4 or self.ipv6):
            issues.append("ipv4 & ipv6 may not both be empty.")
        return issues
class Devops(Validateable):
    """Root aggregate describing one build: stage, paths and enabled aspects."""

    def __init__(
        self,
        inp: dict,
        specialized_builds: dict[BuildType, Validateable],
        mixins: dict[MixinType, Validateable],
    ):
        self.stage = inp.get("stage")
        self.project_root_path = inp.get("project_root_path")
        self.module = inp.get("module")
        self.name = inp.get("name", self.module)
        self.build_dir_name = inp.get("build_dir_name", "target")
        self.specialized_builds = specialized_builds
        self.mixins = mixins

    def build_path(self):
        """<root>/<build_dir>/<name>/<module>, with None segments dropped."""
        segments = [self.project_root_path, self.build_dir_name, self.name, self.module]
        return "/".join(filter_none(segments))

    def validate(self) -> List[str]:
        issues = []
        issues += self.__validate_is_not_empty__("stage")
        issues += self.__validate_is_not_empty__("project_root_path")
        issues += self.__validate_is_not_empty__("module")
        issues += self.__validate_is_not_none__("specialized_builds")
        # validation is recursive: every enabled aspect contributes its issues
        for build in (self.specialized_builds or {}).values():
            issues += build.validate()
        for mixin in (self.mixins or {}).values():
            issues += mixin.validate()
        return issues

View file

@ -0,0 +1,63 @@
from enum import Enum
from typing import List, Optional
from inflection import underscore
from .common import (
Validateable,
)
class GopassType(Enum):
    """How a credential is read from gopass: a named field or the password itself."""
    FIELD = 0
    PASSWORD = 1
class CredentialMapping(Validateable):
    """Maps one gopass entry (path + optional field) to an input/env name."""

    def __init__(self, mapping: dict):
        self.name = mapping.get("name", None)
        self.gopass_field = mapping.get("gopass_field", None)
        self.gopass_path = mapping.get("gopass_path", None)

    def validate(self) -> List[str]:
        issues = self.__validate_is_not_empty__("gopass_path")
        if not self.name and not self.gopass_field:
            issues.append("Either name or gopass field has to be defined.")
        return issues

    def gopass_type(self):
        """FIELD when a gopass field is configured, PASSWORD otherwise."""
        return GopassType.FIELD if self.gopass_field else GopassType.PASSWORD

    def name_for_input(self):
        """snake_case input name; an explicit name wins over the derived field name."""
        if self.name:
            return self.name
        if self.gopass_field:
            return underscore(self.gopass_field)
        return ""

    def name_for_environment(self):
        """Environment-variable (upper-case) spelling of the input name."""
        return self.name_for_input().upper()
class Credentials(Validateable):
    """Collection of credential mappings keyed by input name.

    Explicit mappings from the input dict override default mappings with the
    same input name.
    """

    def __init__(self, inp: dict, default_mappings: Optional[List] = None):
        self.mappings = {}
        # defaults first, then user-supplied mappings, so the latter win on clashes
        for raw_mapping in (default_mappings or []) + inp.get("credentials_mapping", []):
            mapping = CredentialMapping(raw_mapping)
            self.mappings[mapping.name_for_input()] = mapping

    def validate(self) -> List[str]:
        issues: List[str] = []
        for mapping in self.mappings.values():
            issues += mapping.validate()
        return issues

View file

@ -0,0 +1,52 @@
from typing import List, Optional, Dict
from .common import Validateable, Devops, BuildType, MixinType
from .image import Image
from .c4k import C4k
from .provs_k3s import K3s
from .terraform import TerraformDomain
from .release import Release
from .version import Version
class DevopsFactory:
    """Builds validated Devops aggregates from plain input dicts."""

    def __init__(self):
        pass

    def build_devops(self, inp: dict, version: Optional[Version] = None) -> Devops:
        """Construct a Devops with all requested specialized builds and mixins."""
        build_types = self.__parse_build_types__(inp["build_types"])
        mixin_types = self.__parse_mixin_types__(inp["mixin_types"])

        # dispatch table keeps construction order stable: IMAGE, C4K, K3S, TERRAFORM
        constructors = {
            BuildType.IMAGE: Image,
            BuildType.C4K: C4k,
            BuildType.K3S: K3s,
            BuildType.TERRAFORM: TerraformDomain,
        }
        specialized_builds: Dict[BuildType, Validateable] = {}
        for build_type, constructor in constructors.items():
            if build_type in build_types:
                specialized_builds[build_type] = constructor(inp)

        mixins: Dict[MixinType, Validateable] = {}
        if MixinType.RELEASE in mixin_types:
            mixins[MixinType.RELEASE] = Release(inp, version)

        devops = Devops(inp, specialized_builds=specialized_builds, mixins=mixins)
        devops.throw_if_invalid()
        return devops

    def merge(self, inp: dict, context: dict, authorization: dict) -> dict:
        """Merge the sources; inp wins over authorization, which wins over context."""
        return {} | context | authorization | inp

    def __parse_build_types__(self, build_types: List[str]) -> List[BuildType]:
        """Translate build type names into BuildType enum members."""
        return [BuildType[name] for name in build_types]

    def __parse_mixin_types__(self, mixin_types: List[str]) -> List[MixinType]:
        """Translate mixin type names into MixinType enum members."""
        return [MixinType[name] for name in mixin_types]

View file

@ -0,0 +1,52 @@
from typing import List, Dict
from .common import (
filter_none,
Validateable,
)
class Image(Validateable):
    """Settings for building and publishing a container image."""

    def __init__(
        self,
        inp: dict,
    ):
        self.image_dockerhub_user = inp.get("image_dockerhub_user")
        self.image_dockerhub_password = inp.get("image_dockerhub_password")
        self.image_tag = inp.get("image_tag")
        self.image_build_commons_path = inp.get("image_build_commons_path")
        self.image_use_package_common_files = inp.get(
            "image_use_package_common_files", True
        )
        self.image_build_commons_dir_name = inp.get(
            "image_build_commons_dir_name", "docker"
        )

    def validate(self) -> List[str]:
        issues = []
        issues += self.__validate_is_not_empty__("image_dockerhub_user")
        issues += self.__validate_is_not_empty__("image_dockerhub_password")
        # commons path/dir only matter when files are not taken from the package
        if not self.image_use_package_common_files:
            issues += self.__validate_is_not_empty__("image_build_commons_path")
            issues += self.__validate_is_not_empty__("image_build_commons_dir_name")
        return issues

    def build_commons_path(self):
        """Path to the shared docker build files, always '/'-terminated."""
        segments = filter_none(
            [self.image_build_commons_path, self.image_build_commons_dir_name]
        )
        return "/".join(segments) + "/"

    @classmethod
    def get_mapping_default(cls) -> List[Dict[str, str]]:
        """Default gopass mapping for the dockerhub credentials."""
        return [
            {
                "gopass_path": "meissa/web/docker.com",
                "gopass_field": "login",
                "name": "image_dockerhub_user",
            },
            {
                "gopass_path": "meissa/web/docker.com",
                "name": "image_dockerhub_password",
            },
        ]

View file

@ -0,0 +1,125 @@
from pathlib import Path
from typing import Dict
from .common import Devops, MixinType, BuildType, ProviderType
from .credentials import CredentialMapping, Credentials, GopassType
from .devops_factory import DevopsFactory
from .terraform import TerraformDomain
from .provider_digitalocean import Digitalocean
from .provider_hetzner import Hetzner
from .c4k import C4k
from .image import Image
from .release import ReleaseType
from ..infrastructure import BuildFileRepository, CredentialsApi, EnvironmentApi, GitApi
class InitService:
    """Builds a fully initialized Devops aggregate from raw input.

    Collects per-aspect default credential mappings, resolves credentials
    (environment wins over gopass) and release context, then delegates
    construction to DevopsFactory.
    """

    def __init__(
        self,
        devops_factory,
        build_file_repository,
        credentials_api,
        environment_api,
        git_api,
    ):
        # collaborators are injected to keep this service testable
        self.devops_factory = devops_factory
        self.build_file_repository = build_file_repository
        self.credentials_api = credentials_api
        self.environment_api = environment_api
        self.git_api = git_api

    @classmethod
    def prod(cls, base_dir: str):
        """Wire the production object graph, rooted at base_dir."""
        return cls(
            DevopsFactory(),
            BuildFileRepository(base_dir),
            CredentialsApi(),
            EnvironmentApi(),
            GitApi(),
        )

    def initialize(self, inp: dict) -> Devops:
        """Resolve defaults, credentials and context for *inp*; build a Devops."""
        build_types = self.devops_factory.__parse_build_types__(
            inp.get("build_types", [])
        )
        mixin_types = self.devops_factory.__parse_mixin_types__(
            inp.get("mixin_types", [])
        )
        provider_types = TerraformDomain.parse_provider_types(
            inp.get("tf_provider_types", [])
        )
        version = None
        default_mappings = []
        # collect the default gopass mappings of every enabled aspect
        if BuildType.C4K in build_types:
            default_mappings += C4k.get_mapping_default()
        if BuildType.IMAGE in build_types:
            default_mappings += Image.get_mapping_default()
        if BuildType.TERRAFORM in build_types:
            if ProviderType.DIGITALOCEAN in provider_types:
                default_mappings += Digitalocean.get_mapping_default()
            if ProviderType.HETZNER in provider_types:
                default_mappings += Hetzner.get_mapping_default()
        if MixinType.RELEASE in mixin_types:
            # the release mixin needs the current version from the primary build file
            primary_build_file_id = inp.get(
                "release_primary_build_file", "./project.clj"
            )
            primary_build_file = self.build_file_repository.get(
                Path(primary_build_file_id)
            )
            version = primary_build_file.get_version()
        credentials = Credentials(inp, default_mappings)
        authorization = self.authorization(credentials)
        context = self.context(mixin_types, version)
        merged = self.devops_factory.merge(inp, context, authorization)
        return self.devops_factory.build_devops(merged, version=version)

    def context(self, mixin_types, version) -> dict:
        """Derive image_tag and release info from env vars and git state."""
        result = {}
        tag = self.environment_api.get("IMAGE_TAG")
        if MixinType.RELEASE in mixin_types:
            release_type = self.environment_api.get("RELEASE_TYPE")
            if not release_type:
                # fall back to a release-type keyword in the latest commit message
                latest_commit = self.git_api.get_latest_commit()
                if latest_commit in [
                    ReleaseType.MAJOR.name,
                    ReleaseType.MINOR.name,
                    ReleaseType.PATCH.name,
                    ReleaseType.NONE.name,
                ]:
                    release_type = latest_commit
            result["release_type"] = release_type
            result["release_current_branch"] = self.git_api.get_current_branch()
            if not tag:
                # NOTE(review): assumes version was resolved for the release
                # mixin; would raise AttributeError when it is None - confirm.
                tag = version.to_string()
        if tag:
            result["image_tag"] = tag
        return result

    def authorization(self, credentials: Credentials) -> Dict[str, CredentialMapping]:
        """Resolve each mapping: an environment value wins, else look up gopass."""
        result = {}
        for name in credentials.mappings.keys():
            mapping = credentials.mappings[name]
            env_value = self.environment_api.get(mapping.name_for_environment())
            if env_value:
                result[name] = env_value
            else:
                if mapping.gopass_type() == GopassType.FIELD:
                    result[name] = self.credentials_api.gopass_field_from_path(
                        mapping.gopass_path, mapping.gopass_field
                    )
                if mapping.gopass_type() == GopassType.PASSWORD:
                    result[name] = self.credentials_api.gopass_password_from_path(
                        mapping.gopass_path
                    )
        return result

View file

@ -0,0 +1,80 @@
from typing import List, Dict, Set, Any
from .common import Validateable, CredentialMappingDefault
class Aws(Validateable, CredentialMappingDefault):
    """AWS provider settings for terraform, optionally used as S3 state backend."""

    def __init__(
        self,
        inp: dict,
    ):
        self.stage = inp.get("stage")
        self.module = inp.get("module")
        self.aws_bucket = inp.get("aws_bucket")
        self.aws_bucket_kms_key_id = inp.get("aws_bucket_kms_key_id")
        self.aws_account_name = inp.get("aws_account_name", self.stage)
        self.aws_bucket_key = inp.get("aws_bucket_key", self.module)
        self.aws_as_backend = inp.get("aws_as_backend", False)
        self.aws_region = inp.get("aws_region", "eu-central-1")

    def validate(self) -> List[str]:
        issues = []
        issues += self.__validate_is_not_empty__("stage")
        issues += self.__validate_is_not_empty__("module")
        issues += self.__validate_is_not_empty__("aws_account_name")
        issues += self.__validate_is_not_empty__("aws_as_backend")
        # the backend settings are only mandatory when S3 state is enabled
        if self.aws_as_backend:
            issues += self.__validate_is_not_empty__("aws_bucket")
            issues += self.__validate_is_not_empty__("aws_bucket_key")
            issues += self.__validate_is_not_empty__("aws_bucket_kms_key_id")
            issues += self.__validate_is_not_empty__("aws_region")
        return issues

    def backend_config(self) -> Dict[str, Any]:
        """terraform -backend-config values; empty dict for local state."""
        if not self.aws_as_backend:
            return {}
        config = {
            "bucket": self.aws_bucket,
            "key": self.__bucket_key__(),
            "region": self.aws_region,
        }
        if self.aws_bucket_kms_key_id:
            config["kms_key_id"] = self.aws_bucket_kms_key_id
        return config

    def resources_from_package(self) -> Set[str]:
        """Names of the packaged .tf templates this provider needs."""
        files = {"provider_registry.tf", "aws_provider.tf"}
        if self.aws_as_backend:
            files |= {"aws_backend_properties_vars.tf", "aws_backend_with_properties.tf"}
        return files

    def project_vars(self):
        """Terraform input vars contributed by the backend configuration."""
        if not self.aws_as_backend:
            return {}
        return {
            "account_name": self.aws_account_name,
            "bucket": self.aws_bucket,
            "key": self.__bucket_key__(),
            "kms_key_id": self.aws_bucket_kms_key_id,
            "region": self.aws_region,
        }

    def is_local_state(self):
        """True when terraform state stays on disk (no S3 backend)."""
        return not self.aws_as_backend

    def __bucket_key__(self):
        # prefer account/key naming; fall back to stage/module
        if not self.aws_as_backend:
            return ""
        if self.aws_account_name and self.aws_bucket_key:
            return f"{self.aws_account_name}/{self.aws_bucket_key}"
        return f"{self.stage}/{self.module}"

    @classmethod
    def get_mapping_default(cls) -> List[Dict[str, str]]:
        """No gopass defaults: AWS credentials come from the environment."""
        return []

View file

@ -0,0 +1,102 @@
from typing import List, Dict, Set, Any
from .common import Validateable, CredentialMappingDefault
class Digitalocean(Validateable, CredentialMappingDefault):
    """DigitalOcean provider configuration, optionally using spaces as terraform state backend."""

    def __init__(
        self,
        inp: dict,
    ):
        self.stage = inp.get("stage")
        self.module = inp.get("module")
        self.do_api_key = inp.get("do_api_key")
        self.do_spaces_access_id = inp.get("do_spaces_access_id")
        self.do_spaces_secret_key = inp.get("do_spaces_secret_key")
        self.do_as_backend = inp.get("do_as_backend", False)
        self.do_account_name = inp.get("do_account_name", self.stage)
        self.do_bucket = inp.get("do_bucket")
        self.do_bucket_key = inp.get("do_bucket_key")
        self.do_endpoint = inp.get("do_endpoint", "fra1.digitaloceanspaces.com")
        self.do_region = inp.get("do_region", "eu-central-1")

    def validate(self) -> List[str]:
        """Collect validation messages; an empty list means valid.

        Fix: the do_spaces_secret_key check was accidentally duplicated.
        """
        result = []
        result += self.__validate_is_not_empty__("stage")
        result += self.__validate_is_not_empty__("module")
        result += self.__validate_is_not_empty__("do_api_key")
        result += self.__validate_is_not_empty__("do_spaces_access_id")
        result += self.__validate_is_not_empty__("do_spaces_secret_key")
        result += self.__validate_is_not_none__("do_as_backend")
        if self.do_as_backend:
            # Backend usage needs the full spaces coordinates.
            result += self.__validate_is_not_empty__("do_account_name")
            result += self.__validate_is_not_empty__("do_endpoint")
            result += self.__validate_is_not_empty__("do_bucket")
            result += self.__validate_is_not_empty__("do_region")
        return result

    def backend_config(self) -> Dict[str, Any]:
        """Terraform backend settings; empty when state is kept locally."""
        result = {}
        if self.do_as_backend:
            result = {
                "access_key": self.do_spaces_access_id,
                "secret_key": self.do_spaces_secret_key,
                "endpoint": self.do_endpoint,
                "bucket": self.do_bucket,
                "key": self.__bucket_key__(),
                "region": self.do_region,
            }
        return result

    def resources_from_package(self) -> Set[str]:
        """Names of packaged tf files this provider needs staged."""
        result = {"provider_registry.tf", "do_provider.tf", "do_mixin_vars.tf"}
        if self.do_as_backend:
            result.update(
                {"do_backend_properties_vars.tf", "do_backend_with_properties.tf"}
            )
        return result

    def project_vars(self):
        """Terraform variables contributed by this provider."""
        result = {
            "do_api_key": self.do_api_key,
            "do_spaces_access_id": self.do_spaces_access_id,
            "do_spaces_secret_key": self.do_spaces_secret_key,
        }
        if self.do_as_backend:
            result.update(
                {
                    "account_name": self.do_account_name,
                    "endpoint": self.do_endpoint,
                    "bucket": self.do_bucket,
                    "key": self.__bucket_key__(),
                    "region": self.do_region,
                }
            )
        return result

    def is_local_state(self):
        return not self.do_as_backend

    def __bucket_key__(self):
        # Prefer account/bucket_key; fall back to stage/module.
        result = ""
        if self.do_as_backend:
            if self.do_account_name and self.do_bucket_key:
                result = f"{self.do_account_name}/{self.do_bucket_key}"
            else:
                result = f"{self.stage}/{self.module}"
        return result

    @classmethod
    def get_mapping_default(cls) -> List[Dict[str, str]]:
        """Default gopass credential mappings for the spaces credentials."""
        return [
            {
                "gopass_path": "server/devops/digitalocean/s3",
                "gopass_field": "id",
                "name": "do_spaces_access_id",
            },
            {
                "gopass_path": "server/devops/digitalocean/s3",
                "gopass_field": "secret",
                "name": "do_spaces_secret_key",
            },
        ]

View file

@ -0,0 +1,31 @@
from typing import List, Dict, Set, Any
from .common import Validateable, CredentialMappingDefault
class Hetzner(Validateable, CredentialMappingDefault):
    """Hetzner provider configuration; terraform state is always kept locally."""

    def __init__(
        self,
        inp: dict,
    ):
        self.hetzner_api_key = inp.get("hetzner_api_key")

    def validate(self) -> List[str]:
        """Collect validation messages; an empty list means valid."""
        findings = []
        findings += self.__validate_is_not_empty__("hetzner_api_key")
        return findings

    def backend_config(self) -> Dict[str, Any]:
        """Hetzner offers no terraform backend here."""
        return {}

    def resources_from_package(self) -> Set[str]:
        """Names of packaged tf files this provider needs staged."""
        return {
            "provider_registry.tf",
            "hetzner_provider.tf",
            "hetzner_mixin_vars.tf",
        }

    def project_vars(self):
        """Terraform variables contributed by this provider."""
        return {"hetzner_api_key": self.hetzner_api_key}

    def is_local_state(self):
        return True

    @classmethod
    def get_mapping_default(cls) -> List[Dict[str, str]]:
        """No default gopass credential mappings for Hetzner."""
        return []

View file

@ -0,0 +1,92 @@
from typing import List, Optional
from string import Template
from .common import (
Validateable,
DnsRecord,
Devops,
)
# yaml snippets assembled into the provs server config template; indentation
# inside the strings is YAML-significant.
# NOTE(review): leading whitespace inside these literals was lost in rendering
# and reconstructed here — confirm against the committed file.
CONFIG_BASE = """
fqdn: $fqdn
"""
CONFIG_IPV4 = """node:
  ipv4: $ipv4
"""
CONFIG_IPV6 = """  ipv6: $ipv6
"""
CONFIG_CERTMANAGER = """certmanager:
  email: $letsencrypt_email
  letsencryptEndpoint: $letsencrypt_endpoint
"""
CONFIG_ECHO = """echo: $echo
"""


class K3s(Validateable):
    """Domain object describing the provisioning of a k3s server via provs."""

    def __init__(self, inp: dict):
        self.k3s_provision_user = inp.get("k3s_provision_user")
        self.k3s_letsencrypt_email = inp.get("k3s_letsencrypt_email")
        self.k3s_letsencrypt_endpoint = inp.get("k3s_letsencrypt_endpoint")
        self.k3s_app_filename_to_provision = inp.get("k3s_app_filename_to_provision")
        self.k3s_enable_echo = inp.get("k3s_enable_echo", "false")
        self.k3s_provs_template = inp.get("k3s_provs_template", None)
        # Filled later via update_runtime_config once the DNS record is known.
        self.provision_dns: Optional[DnsRecord] = None

    def validate(self) -> List[str]:
        """Collect validation messages; an empty list means valid."""
        result = []
        result += self.__validate_is_not_empty__("k3s_letsencrypt_email")
        result += self.__validate_is_not_empty__("k3s_letsencrypt_endpoint")
        result += self.__validate_is_not_empty__("k3s_app_filename_to_provision")
        if self.provision_dns:
            result += self.provision_dns.validate()
        return result

    def update_runtime_config(self, dns_record: DnsRecord):
        """Set the target DNS record and re-validate the aggregate."""
        self.provision_dns = dns_record
        self.throw_if_invalid()

    def provs_config(self) -> str:
        """Render the provs server yaml config from the assembled template."""
        if not self.provision_dns:
            raise ValueError("provision_dns was not set.")
        substitutes = {
            "fqdn": self.provision_dns.fqdn,
        }
        # Only substitute values that are actually present; the template only
        # contains the matching placeholders (see __config_template__).
        if self.provision_dns.ipv4 is not None:
            substitutes["ipv4"] = self.provision_dns.ipv4
        if self.provision_dns.ipv6 is not None:
            substitutes["ipv6"] = self.provision_dns.ipv6
        if self.k3s_letsencrypt_email is not None:
            substitutes["letsencrypt_email"] = self.k3s_letsencrypt_email
        if self.k3s_letsencrypt_endpoint is not None:
            substitutes["letsencrypt_endpoint"] = self.k3s_letsencrypt_endpoint
        if self.k3s_enable_echo is not None:
            substitutes["echo"] = self.k3s_enable_echo
        return self.__config_template__().substitute(substitutes)

    def command(self, devops: Devops):
        """Build the provs-server.jar command line for this host."""
        if not self.provision_dns:
            raise ValueError("provision_dns was not set.")
        cmd = [
            "provs-server.jar",
            "k3s",
            f"{self.k3s_provision_user}@{self.provision_dns.fqdn}",
            "-c",
            f"{devops.build_path()}/out_k3sServerConfig.yaml",
            "-a",
            f"{devops.build_path()}/{self.k3s_app_filename_to_provision}",
        ]
        return " ".join(cmd)

    def __config_template__(self) -> Template:
        # Use the caller-supplied template verbatim; otherwise assemble the
        # default one from the CONFIG_* snippets for every known value.
        # NOTE(review): the nesting of the appends under the None-check was
        # reconstructed from the flattened rendering — confirm upstream.
        template_text = self.k3s_provs_template
        if template_text is None:
            template_text = CONFIG_BASE
            if self.k3s_letsencrypt_endpoint is not None:
                template_text += CONFIG_CERTMANAGER
            if self.k3s_enable_echo is not None:
                template_text += CONFIG_ECHO
            if self.provision_dns.ipv4 is not None:
                template_text += CONFIG_IPV4
            if self.provision_dns.ipv6 is not None:
                template_text += CONFIG_IPV6
        return Template(template_text)

View file

@ -0,0 +1,58 @@
from typing import Optional, List
from pathlib import Path
from .common import (
Validateable,
ReleaseType,
)
from .version import (
Version,
)
class Release(Validateable):
    """Release configuration: release type, branches and the build files to bump."""

    def __init__(self, inp: dict, version: Optional[Version]):
        self.release_type = ReleaseType[inp.get("release_type", "NONE")]
        self.release_main_branch = inp.get("release_main_branch", "main")
        self.release_current_branch = inp.get("release_current_branch")
        self.release_primary_build_file = inp.get(
            "release_primary_build_file", "./project.clj"
        )
        self.release_secondary_build_files = inp.get(
            "release_secondary_build_files", []
        )
        self.version = version

    def validate(self):
        """Collect validation messages; an empty list means valid.

        Also enforces that actual releases only happen on the main branch.
        Fix: error message grammar ("must be contain" -> "must contain").
        """
        result = []
        result += self.__validate_is_not_empty__("release_type")
        result += self.__validate_is_not_empty__("release_main_branch")
        result += self.__validate_is_not_empty__("release_current_branch")
        result += self.__validate_is_not_empty__("release_primary_build_file")
        result += self.__validate_is_not_empty__("version")
        try:
            Path(self.release_primary_build_file)
        except Exception as e:
            result.append(
                f"release_primary_build_file must be a valid path but was {e}"
            )
        for path in self.release_secondary_build_files:
            try:
                Path(path)
            except Exception as e:
                result.append(
                    f"release_secondary_build_file must contain valid paths but was {e}"
                )
        if self.version:
            result += self.version.validate()
        if (
            self.release_type is not None
            and self.release_type != ReleaseType.NONE
            and self.release_main_branch != self.release_current_branch
        ):
            result.append(f"Releases are allowed only on {self.release_main_branch}")
        return result

    def build_files(self) -> List[str]:
        """Return the primary build file first, then the secondary ones."""
        result = [self.release_primary_build_file]
        result += self.release_secondary_build_files
        return result

View file

@ -0,0 +1,100 @@
from typing import List, Set, Dict, Any
from pathlib import Path
from .common import (
Validateable,
ProviderType,
filter_none,
)
from .provider_digitalocean import Digitalocean
from .provider_hetzner import Hetzner
from .provider_aws import Aws
class TerraformDomain(Validateable):
    """Aggregates terraform settings and the configured cloud providers.

    Provider objects (Digitalocean/Hetzner/Aws) are instantiated from the same
    flat input dict based on tf_provider_types.
    """

    def __init__(self, inp: dict):
        self.module = inp.get("module")
        self.stage = inp.get("stage")
        self.tf_additional_vars = inp.get("tf_additional_vars")
        self.tf_output_json_name = inp.get("tf_output_json_name")
        self.tf_build_commons_path = inp.get("tf_build_commons_path")
        self.tf_provider_types = inp.get("tf_provider_types", [])
        self.tf_additional_resources_from_package = inp.get(
            "tf_additional_resources_from_package", set()
        )
        self.tf_additional_tfvar_files = inp.get("tf_additional_tfvar_files", [])
        self.tf_use_workspace = inp.get("tf_use_workspace", True)
        self.tf_debug_print_terraform_command = inp.get(
            "tf_debug_print_terraform_command", False
        )
        self.tf_build_commons_dir_name = inp.get(
            "tf_build_commons_dir_name", "terraform"
        )
        self.tf_terraform_semantic_version = inp.get(
            "tf_terraform_semantic_version", "1.0.8"
        )
        self.tf_use_package_common_files = inp.get("tf_use_package_common_files", True)
        # One provider object per requested provider type.
        provider_types = TerraformDomain.parse_provider_types(self.tf_provider_types)
        self.providers: Dict[ProviderType, Any] = {}
        if ProviderType.DIGITALOCEAN in provider_types:
            self.providers[ProviderType.DIGITALOCEAN] = Digitalocean(inp)
        if ProviderType.HETZNER in provider_types:
            self.providers[ProviderType.HETZNER] = Hetzner(inp)
        if ProviderType.AWS in provider_types:
            self.providers[ProviderType.AWS] = Aws(inp)

    def validate(self) -> List[str]:
        """Collect validation messages of this domain and all providers."""
        result = []
        result += self.__validate_is_not_empty__("module")
        result += self.__validate_is_not_empty__("stage")
        result += self.__validate_is_not_empty__("tf_build_commons_dir_name")
        result += self.__validate_is_not_none__("tf_additional_resources_from_package")
        result += self.__validate_is_not_none__("tf_additional_tfvar_files")
        result += self.__validate_is_not_none__("tf_provider_types")
        for provider in self.providers.values():
            result += provider.validate()
        return result

    def output_json_name(self) -> str:
        """Explicit output name, or out_<module>.json by default."""
        if self.tf_output_json_name:
            return self.tf_output_json_name
        else:
            return f"out_{self.module}.json"

    def terraform_build_commons_path(self) -> Path:
        """Path to the shared terraform build-commons directory."""
        mylist = [self.tf_build_commons_path, self.tf_build_commons_dir_name]
        return Path("/".join(filter_none(mylist)) + "/")

    def project_vars(self):
        """Terraform variables: stage/module, provider vars, then additional vars."""
        result = {"stage": self.stage, "module": self.module}
        for provider in self.providers.values():
            result.update(provider.project_vars())
        if self.tf_additional_vars:
            result.update(self.tf_additional_vars)
        return result

    def resources_from_package(self) -> Set[str]:
        """Union of the common tf files, provider files and additional resources."""
        result = {"versions.tf", "terraform_build_vars.tf"}
        for provider in self.providers.values():
            result = result.union(provider.resources_from_package())
        result = result.union(self.tf_additional_resources_from_package)
        return result

    def is_local_state(self):
        """True only when every configured provider keeps its state locally."""
        result = True
        for provider in self.providers.values():
            result = result and provider.is_local_state()
        return result

    def backend_config(self) -> Dict[str, Any]:
        """Merged backend settings of all providers."""
        result = {}
        for provider in self.providers.values():
            result.update(provider.backend_config())
        return result

    @classmethod
    def parse_provider_types(cls, tf_provider_types: List[str]) -> List[ProviderType]:
        """Translate provider type names into ProviderType enum members."""
        result = []
        for provider_type in tf_provider_types:
            result.append(ProviderType[provider_type])
        return result

View file

@ -0,0 +1,99 @@
from typing import Optional
from .common import (
Validateable,
)
class Version(Validateable):
    """A major.minor.patch version with an optional snapshot suffix."""

    @classmethod
    def from_str(cls, input_str: str):
        """Parse e.g. '1.2.3' or '1.2.3-SNAPSHOT' into a Version."""
        parts = input_str.split("-")
        digits = [int(piece) for piece in parts[0].split(".")]
        suffix = parts[1] if len(parts) > 1 else None
        return cls(digits, suffix, input_str)

    def __init__(
        self,
        version_list: list,
        snapshot_suffix: Optional[str] = None,
        version_str: Optional[str] = None,
    ):
        self.version_list = version_list
        self.snapshot_suffix = snapshot_suffix
        self.version_string = version_str

    def __eq__(self, other):
        return other and self.to_string() == other.to_string()

    def __hash__(self) -> int:
        return self.to_string().__hash__()

    def is_snapshot(self):
        """True when a snapshot suffix is present."""
        return self.snapshot_suffix is not None

    def to_string(self) -> str:
        """Render as 'x.y.z' or 'x.y.z-suffix'."""
        core = ".".join(str(digit) for digit in self.version_list)
        if self.is_snapshot():
            return f"{core}-{self.snapshot_suffix}"
        return core

    def validate(self):
        """Collect validation messages; an empty list means valid."""
        findings = []
        findings += self.__validate_is_not_empty__("version_list")
        if self.version_list and len(self.version_list) < 3:
            findings += ["version_list must have at least 3 levels."]
        if (
            self.version_list
            and self.version_string
            and self.to_string() != self.version_string
        ):
            findings += [
                f"version_string not parsed correct. Input was {self.version_string} parsed was {self.to_string()}"
            ]
        return findings

    def create_bump(self, snapshot_suffix: Optional[str] = None):
        """Snapshot: keep version as-is; release: bump patch and attach *snapshot_suffix*."""
        digits = self.version_list.copy()
        if self.is_snapshot():
            return Version(
                digits, snapshot_suffix=self.snapshot_suffix, version_str=None
            )
        digits[2] += 1
        return Version(digits, snapshot_suffix=snapshot_suffix, version_str=None)

    def create_patch(self):
        """Snapshot: drop the suffix; release: bump the patch level."""
        digits = self.version_list.copy()
        if not self.is_snapshot():
            digits[2] += 1
        return Version(digits, snapshot_suffix=None, version_str=None)

    def create_minor(self):
        """Bump the minor level unless this is a fresh x.y.0 snapshot."""
        digits = self.version_list.copy()
        if self.is_snapshot() and digits[2] == 0:
            return Version(digits, snapshot_suffix=None, version_str=None)
        digits[2] = 0
        digits[1] += 1
        return Version(digits, snapshot_suffix=None, version_str=None)

    def create_major(self):
        """Bump the major level unless this is a fresh x.0.0 snapshot."""
        digits = self.version_list.copy()
        if self.is_snapshot() and digits[2] == 0 and digits[1] == 0:
            return Version(digits, snapshot_suffix=None, version_str=None)
        digits[2] = 0
        digits[1] = 0
        digits[0] += 1
        return Version(digits, snapshot_suffix=None, version_str=None)

View file

@ -1,31 +0,0 @@
from .devops_terraform_build import DevopsTerraformBuild
def add_exoscale_mixin_config(config, exoscale_api_key, exoscale_secret_key):
    """Register the ExoscaleMixin section on *config* and return it."""
    config['ExoscaleMixin'] = {
        'exoscale_api_key': exoscale_api_key,
        'exoscale_secret_key': exoscale_secret_key,
    }
    return config
class ExoscaleMixin(DevopsTerraformBuild):
    """Terraform build mixin wiring the exoscale provider."""

    def __init__(self, project, config):
        super().__init__(project, config)
        exoscale_mixin_config = config['ExoscaleMixin']
        self.exoscale_api_key = exoscale_mixin_config['exoscale_api_key']
        self.exoscale_secret_key = exoscale_mixin_config['exoscale_secret_key']

    def project_vars(self):
        """Add the exoscale credentials to the terraform vars when present."""
        ret = super().project_vars()
        if self.exoscale_api_key:
            ret['exoscale_api_key'] = self.exoscale_api_key
        if self.exoscale_secret_key:
            ret['exoscale_secret_key'] = self.exoscale_secret_key
        return ret

    def copy_build_resources_from_package(self):
        """Also stage the exoscale provider tf files from the package."""
        super().copy_build_resources_from_package()
        self.copy_build_resource_file_from_package('provider_registry.tf')
        self.copy_build_resource_file_from_package('exoscale_provider.tf')
        self.copy_build_resource_file_from_package('exoscale_mixin_vars.tf')

View file

@ -1,27 +0,0 @@
from .devops_terraform_build import DevopsTerraformBuild
def add_hetzner_mixin_config(config, hetzner_api_key):
    """Register the HetznerMixin section on *config* and return it."""
    config['HetznerMixin'] = {'hetzner_api_key': hetzner_api_key}
    return config
class HetznerMixin(DevopsTerraformBuild):
    """Terraform build mixin wiring the hetzner provider."""

    def __init__(self, project, config):
        super().__init__(project, config)
        hetzner_mixin_config = config['HetznerMixin']
        self.hetzner_api_key = hetzner_mixin_config['hetzner_api_key']

    def project_vars(self):
        """Add the hetzner api key to the terraform vars when present."""
        ret = super().project_vars()
        if self.hetzner_api_key:
            ret['hetzner_api_key'] = self.hetzner_api_key
        return ret

    def copy_build_resources_from_package(self):
        """Also stage the hetzner provider tf files from the package."""
        super().copy_build_resources_from_package()
        self.copy_build_resource_file_from_package('provider_registry.tf')
        self.copy_build_resource_file_from_package('hetzner_provider.tf')
        self.copy_build_resource_file_from_package('hetzner_mixin_vars.tf')

View file

@ -0,0 +1,11 @@
from .infrastructure import (
FileApi,
ImageApi,
ResourceApi,
ExecutionApi,
EnvironmentApi,
CredentialsApi,
GitApi,
TerraformApi,
)
from .repository import DevopsRepository, BuildFileRepository

View file

@ -0,0 +1,206 @@
from subprocess import Popen, PIPE, run
from pathlib import Path
from sys import stdout
from os import chmod, environ
from json import load, dumps
import yaml
from pkg_resources import resource_string
class ResourceApi:
    """Load files bundled inside the ddadevops package."""

    def read_resource(self, path: str) -> bytes:
        # NOTE(review): pkg_resources is deprecated in newer setuptools;
        # importlib.resources would be the modern replacement.
        return resource_string("ddadevops", path)
class FileApi:
    """Shell-backed file operations plus yaml/json (de)serialization."""

    def __init__(self):
        self.execution_api = ExecutionApi()

    def clean_dir(self, directory: str):
        # Recreate the directory from scratch.
        self.execution_api.execute("rm -rf " + directory)
        self.execution_api.execute("mkdir -p " + directory)

    def cp(self, src: str, target_dir: str, check=True):
        self.execution_api.execute(f"cp {src} {target_dir}", check=check)

    def cp_force(self, src: str, target_dir: str, check=True):
        # NOTE(review): appends a shell glob to src (copies "src*") —
        # confirm callers rely on the prefix-match behavior.
        self.execution_api.execute(f"cp -f {src}* {target_dir}", check=check)

    def cp_recursive(self, src: str, target_dir: str, check=True):
        self.execution_api.execute(f"cp -r {src} {target_dir}", check=check)

    def write_data_to_file(self, path: Path, data: bytes):
        # Decodes with the current stdout encoding before writing as text.
        with open(path, "w", encoding="utf-8") as output_file:
            output_file.write(data.decode(stdout.encoding))

    def write_yaml_to_file(self, path: Path, data: dict):
        with open(path, "w", encoding="utf-8") as output_file:
            yaml.dump(data, output_file)
        # Restrict permissions — files may contain credentials.
        chmod(path, 0o600)

    def write_json_to_file(self, path: Path, data: dict):
        with open(path, "w", encoding="utf-8") as output_file:
            output_file.write(dumps(data))
        chmod(path, 0o600)

    def read_json_fro_file(self, path: Path) -> dict:
        # NOTE(review): name has a typo ("fro"); kept for API compatibility.
        with open(path, "r", encoding="utf-8") as input_file:
            return load(input_file)
class ImageApi:
    """Build, run, test and publish docker images."""

    def image(self, name: str, path: Path):
        """Build the image from <path>/image/Dockerfile and tag it as *name*."""
        run(
            f"docker build -t {name} --file {path}/image/Dockerfile {path}/image",
            shell=True,
            check=True,
        )

    def drun(self, name: str):
        """Open an interactive shell in a fresh container of image *name*."""
        run(
            f'docker run -it --entrypoint="" {name} /bin/bash',
            shell=True,
            check=True,
        )

    def dockerhub_login(self, username: str, password: str):
        """Log in to dockerhub.

        NOTE(review): passing the password on the command line exposes it in
        the process list; consider --password-stdin.
        """
        run(
            f"docker login --username {username} --password {password}",
            shell=True,
            check=True,
        )

    def dockerhub_publish(self, name: str, username: str, tag=None):
        """Push *name* as <username>/<name>:<tag> (when given) and :latest."""
        if tag is not None:
            run(
                f"docker tag {name} {username}/{name}:{tag}",
                shell=True,
                check=True,
            )
            run(
                f"docker push {username}/{name}:{tag}",
                shell=True,
                check=True,
            )
        run(
            f"docker tag {name} {username}/{name}:latest",
            shell=True,
            check=True,
        )
        run(
            f"docker push {username}/{name}:latest",
            shell=True,
            check=True,
        )

    def test(self, name: str, path: Path):
        """Build the test image from <path>/test/Dockerfile.

        Fix: the tag was "{name} -test" — docker parsed "-test" as an extra
        flag. The intended tag is "{name}-test".
        """
        run(
            f"docker build -t {name}-test --file {path}/test/Dockerfile {path}/test",
            shell=True,
            check=True,
        )
class ExecutionApi:
    """Run shell commands, either capturing output or streaming it live."""

    def execute(self, command: str, dry_run=False, shell=True, check=True):
        """Run *command* and return its stripped stdout ('' on dry run)."""
        if dry_run:
            print(command)
            return ""
        completed = run(
            command, encoding="UTF-8", shell=shell, stdout=PIPE, check=check
        )
        return completed.stdout.rstrip()

    def execute_live(self, command, dry_run=False, shell=True):
        """Run *command*, echoing its stdout line by line as it arrives."""
        if dry_run:
            print(command)
            return
        proc = Popen(command, stdout=PIPE, shell=shell)
        with proc.stdout:
            for raw_line in iter(proc.stdout.readline, b""):
                print(raw_line.decode("utf-8"), end="")
        proc.wait()
class EnvironmentApi:
    """Read process environment variables (None when unset)."""

    def get(self, key):
        value = environ.get(key)
        return value
class CredentialsApi:
    """Resolve secrets via the gopass CLI."""

    def __init__(self):
        self.execution_api = ExecutionApi()

    def gopass_field_from_path(self, path, field):
        """Return the given field of the gopass entry at *path*; None when path/field missing."""
        credential = None
        if path and field:
            print("get field for: " + path + ", " + field)
            # argv list with shell=False avoids shell interpolation of the path.
            credential = self.execution_api.execute(
                ["gopass", "show", path, field], shell=False
            )
        return credential

    def gopass_password_from_path(self, path):
        """Return the password stored at *path*; None when no path is given."""
        credential = None
        if path:
            print("get password for: " + path)
            credential = self.execution_api.execute(
                ["gopass", "show", "--password", path], shell=False
            )
        return credential
class GitApi:
    """Run git commands via the shell, returning their captured output."""

    def __init__(self):
        self.execution_api = ExecutionApi()

    # pylint: disable=invalid-name
    def get_latest_n_commits(self, n: int):
        """Return subject and body of the last n commits."""
        return self.execution_api.execute(f'git log --oneline --format="%s %b" -n {n}')

    def get_latest_commit(self):
        return self.get_latest_n_commits(1)

    def tag_annotated(self, annotation: str, message: str, count: int):
        """Create an annotated tag on HEAD~count."""
        return self.execution_api.execute(
            f"git tag -a {annotation} -m '{message}' HEAD~{count}"
        )

    def tag_annotated_second_last(self, annotation: str, message: str):
        # Tags HEAD~1, i.e. the commit before the latest one.
        return self.tag_annotated(annotation, message, 1)

    def get_latest_tag(self):
        return self.execution_api.execute("git describe --tags --abbrev=0")

    def get_current_branch(self):
        return "".join(self.execution_api.execute("git branch --show-current")).rstrip()

    def init(self, default_branch: str = "main"):
        """Initialize a repository and switch to the default branch."""
        self.execution_api.execute("git init")
        self.execution_api.execute(f"git checkout -b {default_branch}")

    def set_user_config(self, email: str, name: str):
        self.execution_api.execute(f"git config user.email {email}")
        self.execution_api.execute(f"git config user.name {name}")

    def add_file(self, file_path: Path):
        return self.execution_api.execute(f"git add {file_path}")

    def add_remote(self, origin: str, url: str):
        return self.execution_api.execute(f"git remote add {origin} {url}")

    def commit(self, commit_message: str):
        return self.execution_api.execute(f'git commit -m "{commit_message}"')

    def push(self):
        return self.execution_api.execute("git push")

    def checkout(self, branch: str):
        return self.execution_api.execute(f"git checkout {branch}")
class TerraformApi:
    """Marker for terraform interactions; no operations defined here yet."""
    pass

View file

@ -0,0 +1,37 @@
from pathlib import Path
from ..domain.common import Devops
from ..domain.build_file import BuildFile
class DevopsRepository:
    """Stores/retrieves the validated Devops aggregate as a pybuilder project property."""

    def get_devops(self, project) -> Devops:
        """Return the Devops aggregate, raising when it is invalid."""
        devops = project.get_property("devops")
        devops.throw_if_invalid()
        return devops

    def set_devops(self, project, devops: Devops):
        """Validate and persist the Devops aggregate on the project."""
        devops.throw_if_invalid()
        project.set_property("devops", devops)
class BuildFileRepository:
    """Reads and writes build files (project.clj / build.gradle / ...) below base_dir."""

    def __init__(self, base_dir: str):
        self.base_dir = Path(base_dir)

    def get(self, path: Path) -> BuildFile:
        """Load and validate the build file at base_dir/path."""
        with open(self.base_dir.joinpath(path), "r", encoding="utf-8") as file:
            content = file.read()
            result = BuildFile(path, content)
            result.throw_if_invalid()
            return result

    def write(self, build_file: BuildFile):
        """Overwrite the build file content in place.

        Opened "r+", so the file must already exist; seek + truncate replace
        the previous content without recreating the file.
        """
        build_file.throw_if_invalid()
        with open(
            self.base_dir.joinpath(build_file.file_path),
            "r+",
            encoding="utf-8",
        ) as file:
            file.seek(0)
            file.write(build_file.content)
            file.truncate()

View file

@ -0,0 +1,38 @@
from .domain import DnsRecord, BuildType
from .infrastructure import ExecutionApi
from .devops_build import DevopsBuild
class ProvsK3sBuild(DevopsBuild):
    """DevopsBuild specialization driving k3s provisioning via provs-server.jar."""

    def __init__(self, project, config):
        # Normalize the flat config into the input dict DevopsBuild expects.
        inp = config.copy()
        inp["name"] = project.name
        inp["module"] = config.get("module")
        inp["stage"] = config.get("stage")
        inp["project_root_path"] = config.get("project_root_path")
        inp["build_types"] = config.get("build_types", [])
        inp["mixin_types"] = config.get("mixin_types", [])
        super().__init__(project, inp)
        self.execution_api = ExecutionApi()
        devops = self.devops_repo.get_devops(self.project)
        # Fail fast if the project was not configured for k3s builds.
        if BuildType.K3S not in devops.specialized_builds:
            raise ValueError("K3SBuild requires BuildType.K3S")

    def update_runtime_config(self, dns_record: DnsRecord):
        """Propagate the runtime DNS record into the K3s domain object."""
        super().update_runtime_config(dns_record)
        devops = self.devops_repo.get_devops(self.project)
        devops.specialized_builds[BuildType.K3S].update_runtime_config(dns_record)
        self.devops_repo.set_devops(self.project, devops)

    def write_provs_config(self):
        """Render the provs server config yaml into the build directory."""
        devops = self.devops_repo.get_devops(self.project)
        k3s = devops.specialized_builds[BuildType.K3S]
        with open(
            self.build_path() + "/out_k3sServerConfig.yaml", "w", encoding="utf-8"
        ) as output_file:
            output_file.write(k3s.provs_config())

    def provs_apply(self, dry_run=False):
        """Run provs-server.jar against the configured host (printed only on dry_run)."""
        devops = self.devops_repo.get_devops(self.project)
        k3s = devops.specialized_builds[BuildType.K3S]
        self.execution_api.execute_live(k3s.command(devops), dry_run=dry_run)

View file

@ -1,119 +0,0 @@
from string import Template
import deprecation
from .python_util import execute_live
from .devops_build import DevopsBuild
# yaml fragments appended to form the k3s server config template; indentation
# inside the strings is YAML-significant.
# NOTE(review): leading whitespace inside these literals was lost in rendering
# and reconstructed here — confirm against the committed file.
CONFIG_BASE = """
fqdn: $fqdn
"""
CONFIG_IPV4 = """node:
  ipv4: $ipv4
"""
CONFIG_IPV6 = """  ipv6: $ipv6
"""
CONFIG_CERTMANAGER = """certmanager:
  email: $letsencrypt_email
  letsencryptEndpoint: $letsencrypt_endpoint
"""
CONFIG_ECHO = """echo: $echo
"""


def add_provs_k3s_mixin_config(config,
                               provision_user='root',
                               echo=None,
                               k3s_config_template=None,
                               letsencrypt_email=None,
                               letsencrypt_endpoint=None,
                               fqdn=None,
                               ipv4=None,
                               ipv6=None,
                               app_filename_to_provision=None):
    """Assemble the k3s config template and register the ProvsK3sMixin section on *config*."""
    template_text = k3s_config_template
    if template_text is None:
        template_text = CONFIG_BASE
    # Append a section for every configured optional value.
    # NOTE(review): nesting of these appends relative to the None-check above
    # was reconstructed from a whitespace-stripped rendering — confirm upstream.
    if letsencrypt_endpoint is not None:
        template_text += CONFIG_CERTMANAGER
    if echo is not None:
        template_text += CONFIG_ECHO
    if ipv4 is not None:
        template_text += CONFIG_IPV4
    if ipv6 is not None:
        template_text += CONFIG_IPV6
    config.update({'ProvsK3sMixin':
                   {'fqdn': fqdn,
                    'provision_user': provision_user,
                    'ipv4': ipv4,
                    'ipv6': ipv6,
                    'letsencrypt_email': letsencrypt_email,
                    'letsencrypt_endpoint': letsencrypt_endpoint,
                    'echo': echo,
                    'k3s_config_template': template_text,
                    'app_filename_to_provision': app_filename_to_provision}})
    return config
class ProvsK3sMixin(DevopsBuild):
    """Legacy k3s provisioning mixin; mirrors its state into build properties via put()."""

    def __init__(self, project, config):
        super().__init__(project, config)
        provs_k3s_mixin_config = config['ProvsK3sMixin']
        # Every config value is kept both as an attribute and a build property.
        self.fqdn = provs_k3s_mixin_config['fqdn']
        self.put('fqdn', self.fqdn)
        self.provision_user = provs_k3s_mixin_config['provision_user']
        self.put('provision_user', self.provision_user)
        self.ipv4 = provs_k3s_mixin_config['ipv4']
        self.put('ipv4', self.ipv4)
        self.ipv6 = provs_k3s_mixin_config['ipv6']
        self.put('ipv6', self.ipv6)
        self.letsencrypt_email = provs_k3s_mixin_config['letsencrypt_email']
        self.put('letsencrypt_email', self.letsencrypt_email)
        self.letsencrypt_endpoint = provs_k3s_mixin_config['letsencrypt_endpoint']
        self.put('letsencrypt_endpoint', self.letsencrypt_endpoint)
        self.echo = provs_k3s_mixin_config['echo']
        self.put('echo', self.echo)
        self.k3s_config_template_text = provs_k3s_mixin_config['k3s_config_template']
        self.k3s_config_template = Template(
            provs_k3s_mixin_config['k3s_config_template'])
        self.put('k3s_config_template', self.k3s_config_template)
        self.app_filename_to_provision = provs_k3s_mixin_config['app_filename_to_provision']
        self.put('app_filename_to_provision', self.app_filename_to_provision)

    def update_runtime_config(self, fqdn, ipv4, ipv6=None):
        """Inject fqdn/ips known only at runtime and extend the template accordingly."""
        self.fqdn = fqdn
        self.put('fqdn', fqdn)
        self.ipv4 = ipv4
        self.put('ipv4', ipv4)
        self.ipv6 = ipv6
        self.put('ipv6', ipv6)
        template_text = self.k3s_config_template_text
        if ipv4 is not None:
            template_text += CONFIG_IPV4
        if ipv6 is not None:
            template_text += CONFIG_IPV6
        self.k3s_config_template_text = template_text
        self.put('k3s_config_template_text', template_text)
        template = Template(template_text)
        self.k3s_config_template = template
        self.put('k3s_config_template', template)

    def write_provs_config(self):
        """Render the k3s server config yaml into the build directory."""
        substitutes = self.get_keys(['fqdn', 'ipv4', 'ipv6', 'letsencrypt_email',
                                     'letsencrypt_endpoint', 'echo'])
        with open(self.build_path() + '/out_k3sServerConfig.yaml', "w", encoding="utf-8") as output_file:
            output_file.write(self.k3s_config_template.substitute(substitutes))

    @deprecation.deprecated(deprecated_in="3.1")
    def provs_server(self, dry_run=False):
        """Deprecated alias for provs_apply."""
        self.provs_apply(dry_run)

    def provs_apply(self, dry_run=False):
        """Run provs-server.jar for this host (printed only on dry_run)."""
        cmd = ['provs-server.jar', 'k3s', self.provision_user + '@' + self.fqdn, '-c',
               self.build_path() + '/out_k3sServerConfig.yaml',
               '-a', self.build_path() + '/' + self.app_filename_to_provision]
        if dry_run:
            print(" ".join(cmd))
        else:
            execute_live(cmd)

View file

@ -1,19 +0,0 @@
from subprocess import check_output, Popen, PIPE
import sys
def execute(cmd, shell=False):
    """Run *cmd* and return its stripped stdout (decoded text on Python 3)."""
    if sys.version_info.major == 3:
        raw = check_output(cmd, encoding='UTF-8', shell=shell)
    else:
        raw = check_output(cmd, shell=shell)
    return raw.rstrip()


def execute_live(cmd):
    """Run *cmd*, echoing its stdout line by line while it runs."""
    proc = Popen(cmd, stdout=PIPE)
    with proc.stdout:
        for raw_line in iter(proc.stdout.readline, b''):
            print(raw_line.decode('utf-8'), end='')
    proc.wait()


def filter_none(list_to_filter):
    """Drop None entries from *list_to_filter*, keeping order."""
    return [item for item in list_to_filter if item is not None]

View file

@ -0,0 +1,23 @@
from pybuilder.core import Project
from .devops_build import DevopsBuild
from .application import ReleaseService
from .domain import MixinType
class ReleaseMixin(DevopsBuild):
    """DevopsBuild mixin exposing release preparation and tagging tasks."""

    def __init__(self, project: Project, inp: dict):
        super().__init__(project, inp)
        self.release_service = ReleaseService.prod(project.basedir)
        devops = self.devops_repo.get_devops(self.project)
        # Fail fast if the project was not configured for releases.
        if MixinType.RELEASE not in devops.mixins:
            raise ValueError("ReleaseMixin requires MixinType.RELEASE")

    def prepare_release(self):
        """Delegate release preparation to the ReleaseService."""
        devops = self.devops_repo.get_devops(self.project)
        release = devops.mixins[MixinType.RELEASE]
        self.release_service.prepare_release(release)

    def tag_bump_and_push_release(self):
        """Delegate tagging, version bump and push to the ReleaseService."""
        devops = self.devops_repo.get_devops(self.project)
        release = devops.mixins[MixinType.RELEASE]
        self.release_service.tag_bump_and_push_release(release)

View file

@ -1,5 +1,5 @@
variable "account_name" {}
variable "bucket" {}
variable "key" {}
variable "kms_key_id" {}
variable "region" {}
variable "account_name" {}
variable "region" {}

View file

@ -1,5 +1,5 @@
variable "account_name" {}
variable "endpoint" {}
variable "bucket" {}
variable "key" {}
variable "region" {}
variable "account_name" {}
variable "region" {}

View file

@ -1,5 +0,0 @@
variable "exoscale_api_key" {
}
variable "exoscale_secret_key" {
}

View file

@ -1,4 +0,0 @@
provider "exoscale" {
key = "${var.exoscale_api_key}"
secret = "${var.exoscale_secret_key}"
}

View file

View file

View file

@ -0,0 +1,147 @@
from pathlib import Path
from src.main.python.ddadevops.domain import DevopsFactory, Devops, Version, BuildFile
def devops_config(overrides: dict) -> dict:
    """Return the default test configuration with *overrides* applied on top.

    Fix: the literal previously repeated the do_api_key / do_spaces_access_id /
    do_spaces_secret_key keys (duplicate dict keys silently override each
    other) and the merge variable shadowed the builtin `input`.
    """
    default = {
        "name": "name",
        "module": "module",
        "stage": "test",
        "project_root_path": "root_path",
        "build_dir_name": "target",
        "build_types": ["IMAGE", "C4K", "K3S", "TERRAFORM"],
        "mixin_types": ["RELEASE"],
        "image_dockerhub_user": "dockerhub_user",
        "image_dockerhub_password": "dockerhub_password",
        "image_tag": "image_tag",
        "c4k_config": {},
        "c4k_grafana_cloud_user": "user",
        "c4k_grafana_cloud_password": "password",
        "c4k_grafana_cloud_url": "https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push",
        "c4k_auth": {},
        "k3s_provision_user": "k3s_provision_user",
        "k3s_letsencrypt_email": "k3s_letsencrypt_email",
        "k3s_letsencrypt_endpoint": "k3s_letsencrypt_endpoint",
        "k3s_enable_echo": "false",
        "k3s_app_filename_to_provision": "k3s_app.yaml",
        "tf_provider_types": ["DIGITALOCEAN", "HETZNER", "AWS"],
        "tf_additional_vars": [],
        "tf_output_json_name": "the_out.json",
        "tf_use_workspace": None,
        "tf_use_package_common_files": None,
        "tf_build_commons_path": "build_commons_path",
        "tf_build_commons_dir_name": "terraform",
        "tf_debug_print_terraform_command": None,
        "tf_additional_tfvar_files": [],
        "tf_terraform_semantic_version": None,
        "do_api_key": "api_key",
        "do_spaces_access_id": "spaces_id",
        "do_spaces_secret_key": "spaces_secret",
        "do_as_backend": True,
        "do_endpoint": "endpoint",
        "do_bucket": "bucket",
        "do_region": "region",
        "hetzner_api_key": "hetzner_api_key",
        "aws_as_backend": True,
        "aws_bucket": "bucket",
        "aws_region": "region",
        "aws_bucket_kms_key_id": "aws_bucket_kms_key_id",
        "release_type": "NONE",
        "release_main_branch": "main",
        "release_current_branch": "my_feature",
        "release_primary_build_file": "./package.json",
        # NOTE(review): the domain reads "release_secondary_build_files"
        # (plural "files") — this singular key looks like a latent typo;
        # kept as-is to preserve current test behavior.
        "release_secondary_build_file": [],
        "credentials_mappings": [
            {
                "gopass_path": "a/path",
                "gopass_field": "a-field",
            },
        ],
    }
    result = default.copy()
    result.update(overrides)
    return result
def build_devops(
    overrides: dict, version: Version = Version.from_str("1.0.0-SNAPSHOT")
) -> Devops:
    """Build a fully wired Devops aggregate from the default test config plus *overrides*."""
    config = devops_config(overrides)
    factory = DevopsFactory()
    return factory.build_devops(config, version=version)
class BuildFileRepositoryMock:
    """In-memory stand-in for the build-file repository used by InitService tests."""

    # fixture served for every lookup, regardless of the requested path
    CONTENT = """
            {
                "version": "1.1.5-SNAPSHOT"
            }
            """

    def get(self, path: Path) -> BuildFile:
        """Return the same package.json fixture for any *path*."""
        return BuildFile(Path("./package.json"), self.CONTENT)

    def write(self, build_file: BuildFile):
        """Persisting is a no-op in tests."""
class EnvironmentApiMock:
    """Environment-variable lookup stub backed by a plain dict."""

    def __init__(self, mappings):
        # mapping of variable name -> value
        self.mappings = mappings

    def get(self, key):
        """Return the configured value for *key*, or None when unset."""
        return self.mappings.get(key)
class CredentialsApiMock:
    """Gopass stub resolving secrets from a dict keyed by 'path' or 'path:field'."""

    def __init__(self, mappings):
        # mapping of "path" or "path:field" -> secret value
        self.mappings = mappings

    def gopass_field_from_path(self, path, field):
        """Resolve a named field of a gopass entry, or None when absent."""
        lookup_key = f"{path}:{field}"
        return self.mappings.get(lookup_key)

    def gopass_password_from_path(self, path):
        """Resolve the password of a gopass entry, or None when absent."""
        return self.mappings.get(path)
class GitApiMock:
    """No-op stand-in for the git infrastructure API; every operation is swallowed."""

    def get_latest_n_commits(self, n: int):
        pass

    def get_latest_commit(self):
        pass

    def tag_annotated(self, annotation: str, message: str, count: int):
        pass

    def tag_annotated_second_last(self, annotation: str, message: str):
        pass

    def get_latest_tag(self):
        pass

    def get_current_branch(self):
        pass

    def init(self, default_branch: str = "main"):
        pass

    def set_user_config(self, email: str, name: str):
        pass

    def add_file(self, file_path: Path):
        pass

    def add_remote(self, origin: str, url: str):
        pass

    def commit(self, commit_message: str):
        pass

    def push(self):
        pass

    def checkout(self, branch: str):
        pass

View file

@ -0,0 +1,151 @@
import pytest
from pathlib import Path
from src.main.python.ddadevops.domain import (
BuildFileType,
BuildFile,
Version,
)
def test_sould_validate_build_file():
    """A build file is valid only with a known file name and non-empty content."""
    # NOTE(review): "sould" is a typo for "should" throughout this suite;
    # worth renaming in one sweep.
    sut = BuildFile(Path("./project.clj"), "content")
    assert sut.is_valid()

    sut = BuildFile(None, "")
    assert not sut.is_valid()

    sut = BuildFile(Path("./unknown.extension"), "content")
    assert not sut.is_valid()
def test_sould_calculate_build_type():
    """The file name alone determines the build-file type."""
    sut = BuildFile(Path("./project.clj"), "content")
    assert sut.build_file_type() == BuildFileType.JAVA_CLOJURE

    sut = BuildFile(Path("./build.gradle"), "content")
    assert sut.build_file_type() == BuildFileType.JAVA_GRADLE

    sut = BuildFile(Path("./package.json"), "content")
    assert sut.build_file_type() == BuildFileType.JS
def test_sould_parse_and_set_js():
    """package.json: version is parsed; missing version raises; set_version rewrites it."""
    sut = BuildFile(
        Path("./package.json"),
        """
{
    "name":"c4k-jira",
    "description": "Generate c4k yaml for a jira deployment.",
    "author": "meissa GmbH",
    "version": "1.1.5-SNAPSHOT",
    "homepage": "https://gitlab.com/domaindrivenarchitecture/c4k-jira#readme",
    "bin":{
        "c4k-jira": "./c4k-jira.js"
    }
}
""",
    )
    assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT")

    # a package.json without a version entry must raise on access
    sut = BuildFile(
        Path("./package.json"),
        """
{
    "name":"c4k-jira",
}
""",
    )
    with pytest.raises(Exception):
        sut.get_version()

    # set_version serializes the document back (note: reformatted by json dump)
    sut = BuildFile(
        Path("./package.json"),
        """
{
    "name":"c4k-jira",
    "version": "1.1.5-SNAPSHOT"
}
""",
    )
    sut.set_version(Version.from_str("1.1.5-SNAPSHOT").create_major())
    assert """{
    "name": "c4k-jira",
    "version": "2.0.0"
}""" == sut.content
def test_sould_parse_and_set_version_for_gradle():
    """build.gradle: `version = "..."` line is parsed and rewritten in place."""
    sut = BuildFile(
        Path("./build.gradle"),
        """
version = "1.1.5-SNAPSHOT"
""",
    )
    assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT")

    sut = BuildFile(
        Path("./build.gradle"),
        """
version = "1.1.5-SNAPSHOT"
""",
    )
    sut.set_version(Version.from_str("1.1.5-SNAPSHOT").create_major())
    assert '\nversion = "2.0.0"\n' == sut.content
def test_sould_parse_and_set_version_for_py():
    """build.py: the `version = "..."` assignment is parsed and rewritten in place."""
    sut = BuildFile(
        Path("./build.py"),
        """
from pybuilder.core import init, use_plugin, Author
use_plugin("python.core")

name = "ddadevops"
version = "1.1.5-dev"
""",
    )
    assert sut.get_version() == Version.from_str("1.1.5-dev")

    sut = BuildFile(
        Path("./build.py"),
        """
version = "1.1.5-dev1"
""",
    )
    sut.set_version(Version.from_str("1.1.5-dev1").create_major())
    assert '\nversion = "2.0.0"\n' == sut.content
def test_sould_parse_and_set_version_for_clj():
    """project.clj: the defproject version string is parsed and rewritten in place."""
    sut = BuildFile(
        Path("./project.clj"),
        """
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-SNAPSHOT"
:description "jira c4k-installation package"
:url "https://domaindrivenarchitecture.org"
)
""",
    )
    assert sut.get_version() == Version.from_str("1.1.5-SNAPSHOT")

    sut = BuildFile(
        Path("./project.clj"),
        """
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-SNAPSHOT"
  :description "jira c4k-installation package"
)
""",
    )
    sut.set_version(Version.from_str("1.1.5-SNAPSHOT").create_major())
    assert '\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n  :description "jira c4k-installation package"\n)\n' == sut.content

    # version strings inside dependency vectors must not be touched
    sut = BuildFile(
        Path("./project.clj"),
        """
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.11.0"]]
)
 """,
    )
    sut.set_version(Version.from_str("1.1.5-SNAPSHOT").create_major())
    assert '\n(defproject org.domaindrivenarchitecture/c4k-jira "2.0.0"\n:dependencies [[org.clojure/clojure "1.11.0"]]\n)\n ' == sut.content

View file

@ -0,0 +1,99 @@
import pytest
from pathlib import Path
from src.main.python.ddadevops.domain import (
DnsRecord,
BuildType,
C4k
)
from .helper import build_devops
def test_creation():
    """The default test config registers a C4K specialized build."""
    sut = build_devops({})
    assert BuildType.C4K in sut.specialized_builds
def test_c4k_should_calculate_config():
    """config() needs the runtime DnsRecord first, then merges user config with mon-cfg."""
    # without update_runtime_config the fqdn is missing -> config() must raise
    sut = build_devops({})
    with pytest.raises(Exception):
        sut.specialized_builds[BuildType.C4K].config()

    sut = build_devops({})
    c4k = sut.specialized_builds[BuildType.C4K]
    c4k.update_runtime_config(DnsRecord("fqdn", ipv6="::1"))
    assert {
        "fqdn": "fqdn",
        "mon-cfg": {
            "cluster-name": "module",
            "cluster-stage": "test",
            "grafana-cloud-url": "https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push",
        },
    } == c4k.config()

    # user-supplied c4k_config entries are preserved alongside the generated ones
    sut = build_devops(
        {
            "c4k_config": {"test": "test"},
        }
    )
    c4k = sut.specialized_builds[BuildType.C4K]
    c4k.update_runtime_config(DnsRecord("fqdn", ipv6="::1"))
    assert {
        "test": "test",
        "fqdn": "fqdn",
        "mon-cfg": {
            "cluster-name": "module",
            "cluster-stage": "test",
            "grafana-cloud-url": "https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push",
        },
    } == c4k.config()
def test_c4k_should_calculate_auth():
    """auth() merges user-supplied c4k_auth entries with the generated mon-auth block."""
    sut = build_devops({})
    c4k = sut.specialized_builds[BuildType.C4K]
    assert {
        "mon-auth": {"grafana-cloud-password": "password", "grafana-cloud-user": "user"}
    } == c4k.auth()

    sut = build_devops(
        {
            "c4k_auth": {"test": "test"},
        }
    )
    c4k = sut.specialized_builds[BuildType.C4K]
    assert {
        "test": "test",
        "mon-auth": {
            "grafana-cloud-password": "password",
            "grafana-cloud-user": "user",
        },
    } == c4k.auth()
def test_c4k_build_should_calculate_command():
    """The shell command defaults to the module name; c4k_executable_name overrides it."""
    sut = build_devops(
        {
            "project_root_path": ".",
        }
    )
    assert (
        "c4k-module-standalone.jar "
        + "./target/name/module/out_c4k_config.yaml "
        + "./target/name/module/out_c4k_auth.yaml > "
        + "./target/name/module/out_module.yaml"
        == sut.specialized_builds[BuildType.C4K].command(sut)
    )

    sut = build_devops(
        {
            "project_root_path": ".",
            "c4k_executable_name": "executable_name",
        }
    )
    assert (
        "c4k-executable_name-standalone.jar "
        + "./target/name/module/out_c4k_config.yaml "
        + "./target/name/module/out_c4k_auth.yaml > "
        + "./target/name/module/out_module.yaml"
        == sut.specialized_builds[BuildType.C4K].command(sut)
    )

View file

@ -0,0 +1,65 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
Validateable,
DnsRecord,
Devops,
BuildType,
Version,
ReleaseType,
Release,
)
from src.main.python.ddadevops.domain.image import Image
from .helper import build_devops
class MockValidateable(Validateable):
    """Minimal Validateable with a single field, for exercising the base-class checks."""

    def __init__(self, value):
        self.field = value

    def validate(self):
        # delegate to the base-class non-empty check on the single field
        return self.__validate_is_not_empty__("field")
def test_should_validate_non_empty_strings():
    """None and "" are invalid; any non-empty string passes."""
    sut = MockValidateable("content")
    assert sut.is_valid()

    sut = MockValidateable(None)
    assert not sut.is_valid()

    sut = MockValidateable("")
    assert not sut.is_valid()
def test_should_validate_non_empty_others():
    """Non-string values (int, float, bool) are valid when set; only None fails."""
    sut = MockValidateable(1)
    assert sut.is_valid()

    sut = MockValidateable(1.0)
    assert sut.is_valid()

    sut = MockValidateable(True)
    assert sut.is_valid()

    sut = MockValidateable(None)
    assert not sut.is_valid()
def test_validate_with_reason():
    """validate() returns human-readable reasons for each failing field."""
    sut = MockValidateable(None)
    assert sut.validate()[0] == "Field 'field' must not be None."
def test_should_validate_DnsRecord():
    """A DnsRecord needs a fqdn plus at least one of ipv4 / ipv6."""
    sut = DnsRecord(None)
    assert not sut.is_valid()

    sut = DnsRecord("name")
    assert not sut.is_valid()

    sut = DnsRecord("name", ipv4="1.2.3.4")
    assert sut.is_valid()

    sut = DnsRecord("name", ipv6="1::")
    assert sut.is_valid()

View file

@ -0,0 +1,161 @@
import pytest
from pathlib import Path
from src.main.python.ddadevops.domain import (
CredentialMapping,
Credentials,
GopassType,
MixinType,
)
from .helper import build_devops
def test_should_create_mapping():
    """Mapping name defaults to the gopass field (snake_cased); explicit name wins."""
    sut = CredentialMapping(
        {
            "gopass_path": "server/meissa/grafana-cloud",
            "gopass_field": "grafana-cloud-user",
        }
    )
    assert "grafana_cloud_user" == sut.name_for_input()
    assert "GRAFANA_CLOUD_USER" == sut.name_for_environment()
    assert GopassType.FIELD == sut.gopass_type()

    # no gopass_field -> the whole entry password is resolved
    sut = CredentialMapping(
        {
            "gopass_path": "server/meissa/grafana-cloud",
            "name": "grafana_cloud_password",
        }
    )
    assert "grafana_cloud_password" == sut.name_for_input()
    assert "GRAFANA_CLOUD_PASSWORD" == sut.name_for_environment()
    assert GopassType.PASSWORD == sut.gopass_type()

    # explicit name overrides the derived one
    sut = CredentialMapping(
        {
            "gopass_path": "server/meissa/grafana-cloud",
            "gopass_field": "grafana-cloud-user",
            "name": "gfc_user",
        }
    )
    assert "gfc_user" == sut.name_for_input()
    assert "GFC_USER" == sut.name_for_environment()
    assert GopassType.FIELD == sut.gopass_type()
def test_should_validate_CredentialMapping():
    """A mapping is valid with a path plus either a gopass_field or an explicit name."""
    sut = CredentialMapping(
        {
            "gopass_path": "server/meissa/grafana-cloud",
            "gopass_field": "grafana-cloud-user",
        }
    )
    assert sut.is_valid()

    sut = CredentialMapping(
        {
            "gopass_path": "server/meissa/grafana-cloud",
            "name": "grafana_cloud_user",
        }
    )
    assert sut.is_valid()

    # path alone is not enough -> no resolvable name
    sut = CredentialMapping(
        {
            "gopass_path": "server/meissa/grafana-cloud",
        }
    )
    assert not sut.is_valid()
def test_should_create_credentials():
    """Credentials merge input mappings over defaults; input wins on name collisions."""
    sut = Credentials(
        {
            "credentials_mapping": [
                {
                    "gopass_path": "server/meissa/grafana-cloud",
                    "gopass_field": "grafana-cloud-user",
                },
                {
                    "gopass_path": "server/meissa/grafana-cloud",
                    "name": "grafana_cloud_password",
                },
            ],
        }
    )
    assert sut
    assert 2 == len(sut.mappings)

    # defaults alone populate the mappings when input has none
    sut = Credentials(
        {},
        default_mappings=[
            {
                "gopass_path": "server/meissa/grafana-cloud",
                "gopass_field": "grafana-cloud-user",
            },
            {
                "gopass_path": "server/meissa/grafana-cloud",
                "name": "grafana_cloud_password",
            },
        ],
    )
    assert sut
    assert 2 == len(sut.mappings)

    # overlapping name "grafana_cloud_password": the input mapping replaces the default
    sut = Credentials(
        {
            "credentials_mapping": [
                {
                    "gopass_path": "dome/path",
                    "gopass_field": "some-field",
                },
                {
                    "gopass_path": "another_path",
                    "name": "grafana_cloud_password",
                },
            ],
        },
        default_mappings=[
            {
                "gopass_path": "server/meissa/grafana-cloud",
                "gopass_field": "grafana-cloud-user",
            },
            {
                "gopass_path": "server/meissa/grafana-cloud",
                "name": "grafana_cloud_password",
            },
        ],
    )
    assert sut
    assert 3 == len(sut.mappings)
    assert sut.mappings["grafana_cloud_password"].gopass_path == "another_path"
def test_should_validate_credentials():
    """Credentials are valid only if every contained mapping is valid."""
    sut = Credentials(
        {
            "credentials_mapping": [
                {
                    "gopass_path": "server/meissa/grafana-cloud",
                    "gopass_field": "grafana-cloud-user",
                },
                {
                    "gopass_path": "server/meissa/grafana-cloud",
                    "name": "grafana_cloud_password",
                },
            ],
        }
    )
    assert sut.is_valid()

    # second mapping has neither field nor name -> the aggregate is invalid
    sut = Credentials(
        {
            "credentials_mapping": [
                {
                    "gopass_path": "server/meissa/grafana-cloud",
                    "gopass_field": "grafana-cloud-user",
                },
                {"gopass_path": "server/meissa/grafana-cloud"},
            ],
        }
    )
    assert not sut.is_valid()

View file

@ -0,0 +1,9 @@
import pytest
from src.main.python.ddadevops.domain import (
Devops,
)
from .helper import build_devops
def test_devops_buildpath():
    """build_path is <project_root>/<build_dir>/<name>/<module>."""
    sut = build_devops({'module': "cloud", 'name': "meissa"})
    assert "root_path/target/meissa/cloud" == sut.build_path()

View file

@ -0,0 +1,69 @@
import pytest
from src.main.python.ddadevops.domain import (
DevopsFactory,
Version,
BuildType,
MixinType,
)
def test_devops_factory():
    """Factory rejects unknown/incomplete configs and wires each build/mixin type."""
    # unknown build type -> raise
    with pytest.raises(Exception):
        DevopsFactory().build_devops({"build_types": ["NOTEXISTING"]})

    # IMAGE without the mandatory base keys (stage, name, ...) -> raise
    with pytest.raises(Exception):
        DevopsFactory().build_devops(
            {
                "build_types": ["IMAGE"],
            }
        )

    sut = DevopsFactory().build_devops(
        {
            "stage": "test",
            "name": "mybuild",
            "module": "test_image",
            "project_root_path": "../../..",
            "build_types": ["IMAGE"],
            "mixin_types": [],
            "image_dockerhub_user": "dockerhub_user",
            "image_dockerhub_password": "dockerhub_password",
            "image_tag": "docker_image_tag",
        }
    )
    assert sut is not None
    assert sut.specialized_builds[BuildType.IMAGE] is not None

    sut = DevopsFactory().build_devops(
        {
            "stage": "test",
            "name": "mybuild",
            "module": "test_image",
            "project_root_path": "../../..",
            "build_types": ["C4K"],
            "mixin_types": [],
            "c4k_grafana_cloud_user": "user",
            "c4k_grafana_cloud_password": "password",
        },
        Version.from_str("1.0.0"),
    )
    assert sut is not None
    assert sut.specialized_builds[BuildType.C4K] is not None

    sut = DevopsFactory().build_devops(
        {
            "stage": "test",
            "name": "mybuild",
            "module": "test_image",
            "project_root_path": "../../..",
            "build_types": [],
            "mixin_types": ["RELEASE"],
            "release_type": "NONE",
            "release_main_branch": "main",
            "release_current_branch": "my_feature",
            "release_config_file": "project.clj",
        },
        Version.from_str("1.0.0"),
    )
    assert sut is not None
    assert sut.mixins[MixinType.RELEASE] is not None

View file

@ -0,0 +1,14 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
BuildType,
)
from .helper import build_devops
def test_devops_build_commons_path():
    """IMAGE build resolves its build-commons resources under docker/."""
    sut = build_devops({})
    image = sut.specialized_builds[BuildType.IMAGE]
    assert image is not None
    assert image.is_valid()
    assert "docker/" == image.build_commons_path()

View file

@ -0,0 +1,58 @@
import pytest
from src.main.python.ddadevops.domain import (
InitService,
DevopsFactory,
Version,
MixinType,
BuildType,
)
from .helper import (
BuildFileRepositoryMock,
EnvironmentApiMock,
CredentialsApiMock,
GitApiMock,
devops_config,
)
def test_should_load_build_file():
    """InitService reads the release version from the primary build file."""
    sut = InitService(
        DevopsFactory(),
        BuildFileRepositoryMock(),
        CredentialsApiMock({
            "server/meissa/grafana-cloud:grafana-cloud-user": "gopass-gfc-user",
            "server/meissa/grafana-cloud": "gopass-gfc-password",
        }),
        EnvironmentApiMock({}),
        GitApiMock(),
    )
    # the mocked package.json carries 1.1.5-SNAPSHOT
    assert (
        Version.from_str("1.1.5-SNAPSHOT")
        == sut.initialize(devops_config({})).mixins[MixinType.RELEASE].version
    )
def test_should_resolve_passwords():
    """Credential resolution prefers environment variables over gopass lookups."""
    sut = InitService(
        DevopsFactory(),
        BuildFileRepositoryMock(),
        CredentialsApiMock(
            {
                "server/meissa/grafana-cloud:grafana-cloud-user": "gopass-gfc-user",
                "server/meissa/grafana-cloud": "gopass-gfc-password",
            }
        ),
        EnvironmentApiMock({"C4K_GRAFANA_CLOUD_USER": "env-gfc-user"}),
        GitApiMock(),
    )
    # drop the inlined credentials so the service has to resolve them
    config = devops_config({})
    del config["c4k_grafana_cloud_user"]
    del config["c4k_grafana_cloud_password"]
    devops = sut.initialize(config)
    c4k = devops.specialized_builds[BuildType.C4K]
    # user comes from the environment, password falls back to gopass
    assert {
        "mon-auth": {
            "grafana-cloud-password": "gopass-gfc-password",
            "grafana-cloud-user": "env-gfc-user",
        }
    } == c4k.auth()

View file

@ -0,0 +1,77 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
BuildType,
Aws,
)
from .helper import devops_config
def test_aws_creation():
    """Aws is valid with an account name, or with full backend settings instead."""
    sut = Aws(
        {
            "module": "module",
            "stage": "test",
            "aws_account_name": "aws_account_name",
        }
    )
    assert sut is not None
    assert sut.is_valid()

    sut = Aws(
        {
            "module": "module",
            "stage": "test",
            "aws_as_backend": True,
            "aws_bucket": "bucket",
            "aws_bucket_kms_key_id": "aws_bucket_kms_key_id",
        }
    )
    assert sut is not None
    assert sut.is_valid()
def test_should_calculate_backend_config():
    """S3 backend config: bucket/kms/region pass through, key is <stage>/<module>."""
    sut = Aws(
        devops_config(
            {
                "module": "dns_aws",
                "stage": "prod",
                "aws_bucket": "meissa-configuration",
                "aws_bucket_kms_key_id": "arn:aws:kms:eu-central-1:907507348333:alias/meissa-configuration",
                "aws_region": "eu-central-1",
            }
        )
    )
    assert {
        "bucket": "meissa-configuration",
        "key": "prod/dns_aws",
        "kms_key_id": "arn:aws:kms:eu-central-1:907507348333:alias/meissa-configuration",
        "region": "eu-central-1",
    } == sut.backend_config()
def test_should_calculate_project_vars():
    """Terraform project vars are empty unless aws_as_backend is enabled."""
    sut = Aws(
        devops_config(
            {
                "aws_as_backend": False,
            }
        )
    )
    assert {} == sut.project_vars()

    sut = Aws(
        devops_config(
            {
                "aws_as_backend": True,
            }
        )
    )
    assert {
        "account_name": "test",
        "bucket": "bucket",
        "key": "test/module",
        "kms_key_id": "aws_bucket_kms_key_id",
        "region": "region",
    } == sut.project_vars()

View file

@ -0,0 +1,84 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
BuildType,
Digitalocean,
)
from .helper import devops_config
def test_digitalocean_creation():
    """Digitalocean is valid with api/spaces keys; backend settings are optional extras."""
    sut = Digitalocean(
        {
            "module": "module",
            "stage": "test",
            "do_api_key": "api_key",
            "do_spaces_access_id": "spaces_id",
            "do_spaces_secret_key": "spaces_secret",
        }
    )
    assert sut is not None
    assert sut.is_valid()

    sut = Digitalocean(
        {
            "module": "module",
            "stage": "test",
            "do_api_key": "api_key",
            "do_spaces_access_id": "spaces_id",
            "do_spaces_secret_key": "spaces_secret",
            "do_as_backend": True,
            "do_account_name": "account_name",
            "do_endpoint": "endpoint",
            "do_bucket": "bucket",
            "do_bucket_key": "bucket_key",
            "do_region": "region",
        }
    )
    assert sut is not None
    assert sut.is_valid()
def test_should_calculate_backend_config():
    """Spaces backend config maps credentials through; key is <stage>/<module>."""
    sut = Digitalocean(devops_config({}))
    assert {
        "access_key": "spaces_id",
        "secret_key": "spaces_secret",
        "endpoint": "endpoint",
        "bucket": "bucket",
        "key": "test/module",
        "region": "region",
    } == sut.backend_config()
def test_should_calculate_project_vars():
    """Project vars always carry the DO credentials; backend vars only when enabled."""
    sut = Digitalocean(
        devops_config(
            {
                "do_as_backend": False,
            }
        )
    )
    assert {
        "do_api_key": "api_key",
        "do_spaces_access_id": "spaces_id",
        "do_spaces_secret_key": "spaces_secret",
    } == sut.project_vars()

    sut = Digitalocean(
        devops_config(
            {
                "do_as_backend": True,
            }
        )
    )
    assert {
        "do_api_key": "api_key",
        "do_spaces_access_id": "spaces_id",
        "do_spaces_secret_key": "spaces_secret",
        "account_name": "test",
        "endpoint": "endpoint",
        "bucket": "bucket",
        "key": "test/module",
        "region": "region",
    } == sut.project_vars()

View file

@ -0,0 +1,19 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
BuildType,
Hetzner,
)
from .helper import devops_config
def test_hetzner_creation():
    """Hetzner only needs an api key to be valid."""
    sut = Hetzner(
        devops_config(
            {
                "hetzner_api_key": "api_key",
            }
        )
    )
    assert sut is not None
    assert sut.is_valid()

View file

@ -0,0 +1,33 @@
import pytest
from pathlib import Path
from src.main.python.ddadevops.domain import DnsRecord, BuildType, K3s
from .helper import build_devops
def test_creation():
    """The default test config registers a K3S specialized build."""
    sut = build_devops({})
    assert BuildType.K3S in sut.specialized_builds
    assert sut.specialized_builds[BuildType.K3S]
def test_should_calculate_provs_config():
    """Rendered provs config contains the fqdn and no unresolved template variables."""
    sut = build_devops({}).specialized_builds[BuildType.K3S]
    sut.update_runtime_config(DnsRecord("example.org", ipv6="::1"))
    assert "fqdn:" in sut.provs_config()
    # idiomatic membership negation (`x not in y`, flake8 E713) instead of `not x in y`
    assert "$" not in sut.provs_config()
def test_should_calculate_command():
    """provs-server command combines provision user, fqdn, config and app file paths."""
    devops = build_devops({})
    sut = devops.specialized_builds[BuildType.K3S]
    sut.update_runtime_config(DnsRecord("example.org", ipv6="::1"))
    assert (
        "provs-server.jar "
        + "k3s "
        + "k3s_provision_user@example.org "
        + "-c "
        + "root_path/target/name/module/out_k3sServerConfig.yaml "
        + "-a "
        + "root_path/target/name/module/k3s_app.yaml"
        == sut.command(devops)
    )

View file

@ -0,0 +1,63 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
Validateable,
DnsRecord,
Devops,
BuildType,
MixinType,
Version,
ReleaseType,
Release,
Image,
)
from .helper import build_devops, devops_config
def test_sould_validate_release():
    """Release validity: non-NONE release types require being on the main branch,
    and the primary build file must be a string."""
    sut = Release(
        devops_config(
            {
                "release_type": "MINOR",
                "release_current_branch": "main",
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT"),
    )
    assert sut.is_valid()

    # releasing from a feature branch is not allowed
    sut = Release(
        devops_config(
            {
                "release_type": "MINOR",
                "release_current_branch": "some-feature-branch",
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT"),
    )
    assert not sut.is_valid()

    # primary build file must be a string path, not an int
    sut = Release(
        devops_config(
            {
                "release_primary_build_file": 1,
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT"),
    )
    assert not sut.is_valid()
def test_should_calculate_build_files():
    """build_files() lists the primary build file followed by the secondary ones.

    Renamed: this function was a second `def test_sould_validate_release`, which
    shadowed the validation test above so pytest silently never ran it.
    """
    sut = Release(
        devops_config(
            {
                "release_type": "MINOR",
                "release_current_branch": "main",
                "release_primary_build_file": "project.clj",
                "release_secondary_build_files": ["package.json"],
            }
        ),
        Version.from_str("1.3.1-SNAPSHOT"),
    )
    assert ["project.clj", "package.json"] == sut.build_files()

View file

@ -0,0 +1,186 @@
import pytest
from pathlib import Path
from src.main.python.ddadevops.domain import (
DnsRecord,
BuildType,
ProviderType,
TerraformDomain,
)
from .helper import build_devops, devops_config
def test_creation():
    """Default config registers TERRAFORM with all three providers."""
    devops = build_devops({})
    sut = devops.specialized_builds[BuildType.TERRAFORM]
    assert BuildType.TERRAFORM in devops.specialized_builds
    assert sut
    assert sut.providers[ProviderType.DIGITALOCEAN]
    assert sut.providers[ProviderType.HETZNER]
    assert sut.providers[ProviderType.AWS]
def test_should_calculate_output_json_name():
    """Output json name is configurable, defaulting to out_<module>.json."""
    config = devops_config({})
    sut = TerraformDomain(config)
    assert "the_out.json" == sut.output_json_name()

    config = devops_config({})
    del config["tf_output_json_name"]
    sut = TerraformDomain(config)
    assert "out_module.json" == sut.output_json_name()
def test_should_validate():
    """TerraformDomain validity aggregates the validity of its providers."""
    config = devops_config({})
    sut = TerraformDomain(config)
    assert sut.is_valid()

    # empty DO api key invalidates the DIGITALOCEAN provider and the domain
    config = devops_config(
        {
            "do_api_key": "",
        }
    )
    sut = TerraformDomain(config)
    assert not sut.is_valid()

    # empty AWS account name invalidates the AWS provider and the domain
    config = devops_config(
        {
            "aws_account_name": "",
        }
    )
    sut = TerraformDomain(config)
    assert not sut.is_valid()
def test_should_calculate_terraform_build_commons_path():
    """Commons path is <tf_build_commons_path>/<dir_name>, defaulting to ./terraform."""
    config = devops_config({})
    del config["tf_build_commons_path"]
    del config["tf_build_commons_dir_name"]
    sut = TerraformDomain(config)
    assert Path("terraform") == sut.terraform_build_commons_path()

    config = devops_config({})
    sut = TerraformDomain(config)
    assert Path("build_commons_path/terraform") == sut.terraform_build_commons_path()
def test_should_calculate_project_vars():
    """Project vars start with module/stage and accumulate each provider's vars."""
    config = devops_config(
        {
            "tf_provider_types": [],
        }
    )
    sut = TerraformDomain(config)
    assert {"module": "module", "stage": "test"} == sut.project_vars()

    config = devops_config(
        {
            "do_as_backend": False,
            "aws_as_backend": False,
        }
    )
    sut = TerraformDomain(config)
    assert {
        "module": "module",
        "stage": "test",
        "do_api_key": "api_key",
        "do_spaces_access_id": "spaces_id",
        "do_spaces_secret_key": "spaces_secret",
        "hetzner_api_key": "hetzner_api_key",
    } == sut.project_vars()
def test_should_calculate_resources_from_package():
    """Packaged resource set = base files + per-provider files + backend files + extras."""
    config = devops_config(
        {
            "tf_provider_types": [],
        }
    )
    sut = TerraformDomain(config)
    assert {"versions.tf", "terraform_build_vars.tf"} == sut.resources_from_package()

    config = devops_config(
        {
            "tf_provider_types": ["DIGITALOCEAN"],
            "do_as_backend": False,
        }
    )
    sut = TerraformDomain(config)
    assert {
        "versions.tf",
        "terraform_build_vars.tf",
        "provider_registry.tf",
        "do_provider.tf",
        "do_mixin_vars.tf",
    } == sut.resources_from_package()

    # enabling the DO backend adds the backend property files
    sut = TerraformDomain(
        devops_config(
            {
                "tf_provider_types": ["DIGITALOCEAN"],
                "do_as_backend": True,
            }
        )
    )
    assert {
        "versions.tf",
        "terraform_build_vars.tf",
        "provider_registry.tf",
        "do_provider.tf",
        "do_mixin_vars.tf",
        "do_backend_properties_vars.tf",
        "do_backend_with_properties.tf",
    } == sut.resources_from_package()

    config = devops_config({"tf_provider_types": ["HETZNER"]})
    sut = TerraformDomain(config)
    assert {
        "versions.tf",
        "terraform_build_vars.tf",
        "provider_registry.tf",
        "hetzner_provider.tf",
        "hetzner_mixin_vars.tf",
    } == sut.resources_from_package()

    # default providers (all three) plus explicitly added extra resources
    config = devops_config(
        {
            "tf_additional_resources_from_package": {"my.file"},
            "do_as_backend": False,
        }
    )
    sut = TerraformDomain(config)
    assert {
        "versions.tf",
        "terraform_build_vars.tf",
        "provider_registry.tf",
        "do_provider.tf",
        "do_mixin_vars.tf",
        "provider_registry.tf",
        "hetzner_provider.tf",
        "hetzner_mixin_vars.tf",
        "aws_backend_with_properties.tf",
        "aws_provider.tf",
        "aws_backend_properties_vars.tf",
        "my.file",
    } == sut.resources_from_package()
def test_should_calculate_local_state_handling():
    """State is local unless some provider is configured as a remote backend."""
    sut = TerraformDomain(
        devops_config(
            {
                "tf_provider_types": [],
            }
        )
    )
    assert sut.is_local_state()

    sut = TerraformDomain(
        devops_config(
            {
                "tf_provider_types": ["DIGITALOCEAN"],
                "do_as_backend": True,
            }
        )
    )
    assert not sut.is_local_state()

View file

@ -0,0 +1,114 @@
from pybuilder.core import Project
from pathlib import Path
from src.main.python.ddadevops.domain import (
Version,
ReleaseType,
Image,
)
from .helper import build_devops, devops_config
def test_version_creation():
    """Versions parse from string, expose the numeric list, and report snapshot state."""
    sut = Version.from_str("1.2.3")
    assert sut.to_string() == "1.2.3"
    assert sut.version_list == [1, 2, 3]
    # assert truthiness directly instead of comparing `== False` (flake8 E712)
    assert not sut.is_snapshot()

    sut = Version.from_str("1.2.3-SNAPSHOT")
    assert sut.to_string() == "1.2.3-SNAPSHOT"
    assert sut.version_list == [1, 2, 3]
    assert sut.is_snapshot()
def test_should_validate_version_list():
    """A version needs exactly three numeric components to be valid."""
    sut = Version(None)
    assert not sut.is_valid()

    sut = Version([])
    assert not sut.is_valid()

    sut = Version([1, 2])
    assert not sut.is_valid()

    sut = Version([1, 2, 3])
    assert sut.is_valid()
def test_should_validate_parsing():
    """Parsed versions must stay consistent with their originating string."""
    sut = Version.from_str("1.2")
    assert not sut.is_valid()

    # mutating the list after parsing breaks the string/list consistency check
    sut = Version.from_str("1.2.3")
    sut.version_list = [2, 2, 2]
    assert not sut.is_valid()

    sut = Version.from_str("1.2.3")
    assert sut.is_valid()

    sut = Version.from_str("1.2.3-SNAPSHOT")
    assert sut.is_valid()

    sut = Version.from_str("1.2.3-dev")
    assert sut.is_valid()
def test_should_create_patch():
    """Patch release: snapshot drops the suffix; plain version bumps patch. Immutable."""
    version = Version.from_str("1.2.3-SNAPSHOT")
    sut = version.create_patch()
    assert sut.to_string() == "1.2.3"
    assert version.to_string() == "1.2.3-SNAPSHOT"

    version = Version.from_str("1.2.3")
    sut = version.create_patch()
    assert sut.to_string() == "1.2.4"
    assert version.to_string() == "1.2.3"
def test_should_create_minor():
    """Minor release bumps the minor, except a x.y.0-SNAPSHOT which just finalizes."""
    version = Version.from_str("1.2.3-SNAPSHOT")
    sut = version.create_minor()
    assert sut.to_string() == "1.3.0"

    version = Version.from_str("1.2.3")
    sut = version.create_minor()
    assert sut.to_string() == "1.3.0"

    # already a minor snapshot -> only the suffix is dropped
    version = Version.from_str("1.3.0-SNAPSHOT")
    sut = version.create_minor()
    assert sut.to_string() == "1.3.0"

    version = Version.from_str("1.3.0")
    sut = version.create_minor()
    assert sut.to_string() == "1.4.0"
def test_should_create_major():
    """Major release bumps the major, except a x.0.0-SNAPSHOT which just finalizes."""
    version = Version.from_str("1.2.3-SNAPSHOT")
    sut = version.create_major()
    assert sut.to_string() == "2.0.0"

    version = Version.from_str("1.2.3")
    sut = version.create_major()
    assert sut.to_string() == "2.0.0"

    # already a major snapshot -> only the suffix is dropped
    version = Version.from_str("1.0.0-SNAPSHOT")
    sut = version.create_major()
    assert sut.to_string() == "1.0.0"

    version = Version.from_str("1.0.0")
    sut = version.create_major()
    assert sut.to_string() == "2.0.0"
def test_should_create_bump():
    """Bump keeps a snapshot unchanged; a released version gets patch+1 plus suffix."""
    version = Version.from_str("1.2.3-SNAPSHOT")
    sut = version.create_bump()
    assert sut.to_string() == "1.2.3-SNAPSHOT"

    version = Version.from_str("1.2.3")
    sut = version.create_bump("SNAPSHOT")
    assert sut.to_string() == "1.2.4-SNAPSHOT"

    version = Version.from_str("1.0.0")
    sut = version.create_bump("SNAPSHOT")
    assert sut.to_string() == "1.0.1-SNAPSHOT"

View file

@ -0,0 +1,8 @@
from pathlib import Path
from src.main.python.ddadevops.infrastructure import ExecutionApi
def copy_resource(source: Path, target: Path):
    """Copy a test resource from src/test/resources/ into *target*.

    Uses shutil.copy instead of shelling out to `cp` via an f-string, which is
    portable (no POSIX shell required) and safe for paths containing spaces.

    :param source: resource path relative to src/test/resources/
    :param target: destination file or directory
    """
    import shutil

    res_source = Path("src/test/resources/").joinpath(source)
    shutil.copy(res_source, target)

View file

@ -0,0 +1,42 @@
import os
from pybuilder.core import Project
from src.main.python.ddadevops.domain import DnsRecord
from src.main.python.ddadevops.c4k_build import C4kBuild
from .domain.helper import (
CredentialsApiMock,
devops_config,
)
def test_c4k_build(tmp_path):
    """End-to-end C4kBuild: build dir is created and config/auth yaml files are written."""
    str_tmp_path = str(tmp_path)
    project = Project(str_tmp_path, name="name")
    # credentials are resolved from the environment during build setup
    os.environ["C4K_GRAFANA_CLOUD_USER"] = "user"
    os.environ["C4K_GRAFANA_CLOUD_PASSWORD"] = "password"
    sut = C4kBuild(
        project,
        devops_config(
            {
                "project_root_path": str_tmp_path,
                "mixin_types": [],
                "build_types": ["C4K"],
                "module": "c4k-test",
                "c4k_config": {"a": 1, "b": 2},
                "c4k_auth": {"c": 3, "d": 4},
                "c4k_grafana_cloud_user": "user",
                "c4k_grafana_cloud_password": "password",
            }
        ),
    )
    sut.initialize_build_dir()
    assert sut.build_path() == f"{str_tmp_path}/target/name/c4k-test"

    sut.update_runtime_config(DnsRecord("test.de", ipv6="::1"))
    sut.write_c4k_config()
    assert os.path.exists(f"{sut.build_path()}/out_c4k_config.yaml")

    sut.write_c4k_auth()
    assert os.path.exists(f"{sut.build_path()}/out_c4k_auth.yaml")

View file

@ -0,0 +1,25 @@
import os
from pathlib import Path
from pybuilder.core import Project
from src.main.python.ddadevops import DevopsBuild
from .domain.helper import devops_config
from .resource_helper import copy_resource
def test_devops_build(tmp_path):
    """DevopsBuild creates its build directory under target/<name>/<module>."""
    str_tmp_path = str(tmp_path)
    copy_resource(Path("package.json"), tmp_path)
    project = Project(str_tmp_path, name="name")
    devops_build = DevopsBuild(
        project,
        devops_config(
            {
                "project_root_path": str_tmp_path,
                "build_types": [],
                "mixin_types": [],
            }
        ),
    )
    devops_build.initialize_build_dir()
    assert os.path.exists(f"{devops_build.build_path()}")

View file

@ -0,0 +1,24 @@
import os
from pybuilder.core import Project
from src.main.python.ddadevops import DevopsImageBuild
from .domain.helper import devops_config
def test_devops_docker_build(tmp_path):
    """DevopsImageBuild can be constructed with dockerhub credentials from the env."""
    str_tmp_path = str(tmp_path)
    project = Project(str_tmp_path, name="name")
    os.environ["IMAGE_DOCKERHUB_USER"] = "user"
    os.environ["IMAGE_DOCKERHUB_PASSWORD"] = "password"
    image_build = DevopsImageBuild(
        project,
        devops_config(
            {
                "project_root_path": str_tmp_path,
                "build_types": ["IMAGE"],
                "mixin_types": [],
            }
        ),
    )
    assert image_build

View file

@ -0,0 +1,30 @@
import pytest as pt
import os
from pathlib import Path
from pybuilder.core import Project
from src.main.python.ddadevops.release_mixin import ReleaseMixin
from src.main.python.ddadevops.domain import Devops, Release
from .domain.helper import devops_config
from .resource_helper import copy_resource
def test_release_mixin(tmp_path):
    """ReleaseMixin wires the build dir; version is read from the copied package.json."""
    str_tmp_path = str(tmp_path)
    copy_resource(Path("package.json"), tmp_path)
    project = Project(str_tmp_path, name="name")

    sut = ReleaseMixin(
        project,
        devops_config(
            {
                "project_root_path": str_tmp_path,
                "mixin_types": ["RELEASE"],
                "build_types": [],
                "module": "release-test",
            }
        ),
    )

    sut.initialize_build_dir()
    assert sut.build_path() == f"{str_tmp_path}/target/name/release-test"

View file

@ -0,0 +1,5 @@
(defproject org.domaindrivenarchitecture/c4k-website "1.1.3"
:description "website c4k-installation package"
:url "https://domaindrivenarchitecture.org"
:license {:name "Apache License, Version 2.0"
:url "https://www.apache.org/licenses/LICENSE-2.0.html"})

View file

@ -0,0 +1,2 @@
version = "12.4.678"

View file

@ -0,0 +1,3 @@
{
"version": "123.123.456"
}

View file

@ -0,0 +1,78 @@
# dda_devops_build
# Copyright 2019 meissa GmbH.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pybuilder.core import init, use_plugin, Author
use_plugin("python.core")
use_plugin("copy_resources")
use_plugin("filter_resources")
#use_plugin("python.unittest")
#use_plugin("python.coverage")
use_plugin("python.distutils")
#use_plugin("python.install_dependencies")
default_task = "publish"
name = "ddadevops"
version = "3.1.3"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
description = __doc__
authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]
url = "https://github.com/DomainDrivenArchitecture/dda-devops-build"
requires_python = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3,!=3.4" # CHECK IF NEW VERSION EXISTS
license = "Apache Software License"
@init
def initialize(project):
    """Configure the pybuilder project: resources to copy/filter and PyPI metadata."""
    #project.build_depends_on('mockito')
    #project.build_depends_on('unittest-xml-reporting')
    project.set_property("verbose", True)

    # __init__.py is filtered so placeholders (e.g. version) get substituted
    project.get_property("filter_resources_glob").append("main/python/ddadevops/__init__.py")
    #project.set_property("dir_source_unittest_python", "src/unittest/python")

    # ship license, terraform templates and docker resources inside the wheel
    project.set_property("copy_resources_target", "$dir_dist/ddadevops")
    project.get_property("copy_resources_glob").append("LICENSE")
    project.get_property("copy_resources_glob").append("src/main/resources/terraform/*")
    project.get_property("copy_resources_glob").append("src/main/resources/docker/image/resources/*")
    project.include_file("ddadevops", "LICENSE")
    project.include_file("ddadevops", "src/main/resources/terraform/*")
    project.include_file("ddadevops", "src/main/resources/docker/image/resources/*")

    #project.set_property('distutils_upload_sign', True)
    #project.set_property('distutils_upload_sign_identity', '')
    project.set_property("distutils_readme_description", True)
    project.set_property("distutils_description_overwrite", True)
    # NOTE(review): classifiers still advertise Python 2.7/3.5-3.8 while CI runs 3.10 —
    # consider refreshing the supported-version metadata.
    project.set_property("distutils_classifiers", [
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Operating System :: POSIX :: Linux',
        'Operating System :: OS Independent',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Software Development :: Testing'
    ])

View file

@ -0,0 +1,33 @@
{
"name": "c4k-jira",
"description": "Generate c4k yaml for a jira deployment.",
"author": "meissa GmbH",
"version": "1.1.5-SNAPSHOT",
"homepage": "https://gitlab.com/domaindrivenarchitecture/c4k-jira#readme",
"repository": "https://www.npmjs.com/package/c4k-jira",
"license": "APACHE2",
"main": "c4k-jira.js",
"bin": {
"c4k-jira": "./c4k-jira.js"
},
"keywords": [
"cljs",
"jira",
"k8s",
"c4k",
"deployment",
"yaml",
"convention4kubernetes"
],
"bugs": {
"url": "https://gitlab.com/domaindrivenarchitecture/c4k-jira/issues"
},
"dependencies": {
"js-base64": "^3.6.1",
"js-yaml": "^4.0.0"
},
"devDependencies": {
"shadow-cljs": "^2.11.18",
"source-map-support": "^0.5.19"
}
}

View file

@ -0,0 +1,47 @@
;; Leiningen project definition for the c4k-jira installation package.
(defproject org.domaindrivenarchitecture/c4k-jira "1.1.5-SNAPSHOT"
:description "jira c4k-installation package"
:url "https://domaindrivenarchitecture.org"
:license {:name "Apache License, Version 2.0"
:url "https://www.apache.org/licenses/LICENSE-2.0.html"}
:dependencies [[org.clojure/clojure "1.11.1"]
[org.clojure/tools.reader "1.3.6"]
[org.domaindrivenarchitecture/c4k-common-clj "2.0.3"]
[hickory "0.7.1"]]
;; %s expands to the active profile, keeping per-profile build output separate.
:target-path "target/%s/"
:source-paths ["src/main/cljc"
"src/main/clj"]
:resource-paths ["src/main/resources"]
;; Deploy to Clojars; releases are unsigned (:sign-releases false).
:repositories [["snapshots" :clojars]
["releases" :clojars]]
:deploy-repositories [["snapshots" {:sign-releases false :url "https://clojars.org/repo"}]
["releases" {:sign-releases false :url "https://clojars.org/repo"}]]
:profiles {:test {:test-paths ["src/test/cljc"]
:resource-paths ["src/test/resources"]
:dependencies [[dda/data-test "0.1.1"]]}
:dev {:plugins [[lein-shell "0.5.0"]]}
;; Uberjar: AOT-compiled standalone jar with CLI + logging deps;
;; javax.mail is excluded to avoid pulling mail transitively via logback.
:uberjar {:aot :all
:main dda.c4k-jira.uberjar
:uberjar-name "c4k-jira-standalone.jar"
:dependencies [[org.clojure/tools.cli "1.0.214"]
[ch.qos.logback/logback-classic "1.4.5"
:exclusions [com.sun.mail/javax.mail]]
[org.slf4j/jcl-over-slf4j "2.0.6"]]}}
;; Release flow: test, verify clean VCS, bump to release version, commit,
;; tag (unsigned), then bump to the next SNAPSHOT.
:release-tasks [["test"]
["vcs" "assert-committed"]
["change" "version" "leiningen.release/bump-version" "release"]
["vcs" "commit"]
["vcs" "tag" "v" "--no-sign"]
["change" "version" "leiningen.release/bump-version"]]
;; "native": GraalVM native-image build from the standalone uberjar;
;; "inst": install the uberjar system-wide via sudo.
:aliases {"native" ["shell"
"native-image"
"--report-unsupported-elements-at-runtime"
"--initialize-at-build-time"
"-jar" "target/uberjar/c4k-jira-standalone.jar"
"-H:ResourceConfigurationFiles=graalvm-resource-config.json"
"-H:Log=registerResource"
"-H:Name=target/graalvm/${:name}"]
"inst" ["shell" "sudo"
"install"
"-m=755"
"target/uberjar/c4k-jira-standalone.jar"
"/usr/local/bin/c4k-jira-standalone.jar"]})

View file

@ -1,48 +0,0 @@
import os
from pybuilder.core import Project
from src.main.python.ddadevops.c4k_mixin import C4kMixin, add_c4k_mixin_config
class MyC4kMixin(C4kMixin):
    """Concrete test double; inherits all C4kMixin behavior unchanged."""
    pass
def test_c4k_mixin(tmp_path):
    """End-to-end check of the C4k mixin: config wiring, build path,
    and generation of the c4k config/auth yaml files."""
    root = str(tmp_path)
    build_dir = 'build'
    project_name = 'testing-project'
    module_name = 'c4k-test'

    project = Project(root, name=project_name)
    project_config = {
        'stage': 'test',
        'project_root_path': root,
        'module': module_name,
        'build_dir_name': build_dir,
    }
    config = {'a': 1, 'b': 2}
    auth = {'c': 3, 'd': 4}

    add_c4k_mixin_config(project_config, module_name, config, auth,
                         grafana_cloud_user='user',
                         grafana_cloud_password='password')

    # The mixin config must reference the very objects passed in (identity, not equality).
    mixin_config = project_config.get('C4kMixin')
    assert mixin_config is not None
    assert mixin_config.get('Name') is module_name
    assert mixin_config.get('Config') is config
    assert mixin_config.get('Auth') is auth

    sut = MyC4kMixin(project, project_config)
    sut.initialize_build_dir()
    assert sut.build_path() == f'{root}/{build_dir}/{project_name}/{module_name}'

    sut.put('fqdn', 'testing.test')

    # Writing the config merges fqdn and monitoring settings, then persists them.
    sut.write_c4k_config()
    assert 'fqdn' in sut.c4k_mixin_config
    assert 'mon-cfg' in sut.c4k_mixin_config
    assert os.path.exists(f'{sut.build_path()}/out_c4k_config.yaml')

    # Writing the auth adds monitoring credentials, then persists them.
    sut.write_c4k_auth()
    assert 'mon-auth' in sut.c4k_mixin_auth
    assert os.path.exists(f'{sut.build_path()}/out_c4k_auth.yaml')