Merge branch 'implement-c4k-mixin' into 'main'

add c4k-mixin

See merge request domaindrivenarchitecture/dda-devops-build!5
Michael Jerger 2022-12-30 13:05:15 +00:00
commit aa633fa6af
21 changed files with 289 additions and 180 deletions


@@ -6,31 +6,34 @@ before_script:
   - pip install -r requirements.txt
 
 stages:
-  - lint
+  - lint&test
   - upload
   - image
 
 flake8:
-  stage: lint
-  allow_failure: true
+  stage: lint&test
   script:
     - pip install -r dev_requirements.txt
    - flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 --show-source --statistics src/main/python/ddadevops/*.py
    - flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics src/main/python/ddadevops/*.py
 
 mypy:
-  stage: lint
-  allow_failure: true
+  stage: lint&test
  script:
    - pip install -r dev_requirements.txt
    - python -m mypy src/main/python/ddadevops/*.py --ignore-missing-imports
 
 pylint:
-  stage: lint
-  allow_failure: true
+  stage: lint&test
  script:
    - pip install -r dev_requirements.txt
-    - pylint -d C0301 src/main/python/ddadevops/*.py
+    - pylint -d C0301,W0614,R0201,C0114,C0115,C0116,similarities,W0702,W0702,R0913,R0902,R0914,R1732 src/main/python/ddadevops/*.py
+
+pytest:
+  stage: lint&test
+  script:
+    - pip install -r dev_requirements.txt
+    - pytest
 
 pypi-stable:
   stage: upload


@@ -28,7 +28,7 @@ use_plugin("python.distutils")
 
 default_task = "publish"
 
 name = "ddadevops"
-version = "3.0.2"
+version = "3.0.18"
 summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
 description = __doc__
 authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]


@@ -9,3 +9,4 @@ pylint==2.15.8
 pytest==7.2
 pytest-cov==4.0
 pytest-datafiles==2.0
+types-setuptools==65.6.0.2

src/__init__.py (new empty file)

src/main/__init__.py (new empty file)



@@ -1,5 +1,5 @@
 """
 ddadevops provide tools to support builds combining gopass,
 terraform, dda-pallet, aws & hetzner-cloud.
 """
@@ -9,6 +9,7 @@ from .provs_k3s_mixin import ProvsK3sMixin, add_provs_k3s_mixin_config
 from .aws_rds_pg_mixin import AwsRdsPgMixin, add_aws_rds_pg_mixin_config
 from .aws_mfa_mixin import AwsMfaMixin, add_aws_mfa_mixin_config
 from .aws_backend_properties_mixin import AwsBackendPropertiesMixin, add_aws_backend_properties_mixin_config
+from .c4k_mixin import C4kMixin, add_c4k_mixin_config
 from .exoscale_mixin import ExoscaleMixin, add_exoscale_mixin_config
 from .digitalocean_backend_properties_mixin import DigitaloceanBackendPropertiesMixin, add_digitalocean_backend_properties_mixin_config
 from .digitalocean_terraform_build import DigitaloceanTerraformBuild, create_digitalocean_terraform_build_config
@@ -16,6 +17,6 @@ from .hetzner_mixin import HetznerMixin, add_hetzner_mixin_config
 from .devops_docker_build import DevopsDockerBuild, create_devops_docker_build_config
 from .devops_terraform_build import DevopsTerraformBuild, create_devops_terraform_build_config
 from .devops_build import DevopsBuild, create_devops_build_config, get_devops_build, get_tag_from_latest_commit
-from .credential import gopass_credential_from_env_path, gopass_credential_from_path, gopass_password_from_path, gopass_field_from_path
+from .credential import gopass_password_from_path, gopass_field_from_path
 
 __version__ = "${version}"


@@ -1,13 +1,11 @@
-from dda_python_terraform import *
+from dda_python_terraform import Terraform
 from .devops_terraform_build import DevopsTerraformBuild
 
 
 def add_aws_backend_properties_mixin_config(config, account_name):
     config.update({'AwsBackendPropertiesMixin':
                    {'account_name': account_name}})
     return config
 
 
 class AwsBackendPropertiesMixin(DevopsTerraformBuild):
 
     def __init__(self, project, config):
@@ -38,14 +36,14 @@ class AwsBackendPropertiesMixin(DevopsTerraformBuild):
         pass
 
     def init_client(self):
-        tf = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
-        tf.init(backend_config=self.backend_config)
-        self.print_terraform_command(tf)
+        terraform = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
+        terraform.init(backend_config=self.backend_config)
+        self.print_terraform_command(terraform)
         if self.use_workspace:
             try:
-                tf.workspace('select', self.stage)
-                self.print_terraform_command(tf)
+                terraform.workspace('select', self.stage)
+                self.print_terraform_command(terraform)
             except:
-                tf.workspace('new', self.stage)
-                self.print_terraform_command(tf)
-        return tf
+                terraform.workspace('new', self.stage)
+                self.print_terraform_command(terraform)
+        return terraform


@@ -1,10 +1,10 @@
-from boto3 import *
+from boto3 import Session
 from .python_util import execute
 from .aws_backend_properties_mixin import AwsBackendPropertiesMixin
 
 
 def add_aws_mfa_mixin_config(config, account_id, region,
                              mfa_role='developer', mfa_account_prefix='',
                              mfa_login_account_suffix='main'):
     config.update({'AwsMfaMixin':
                    {'account_id': account_id,
@@ -39,13 +39,13 @@ class AwsMfaMixin(AwsBackendPropertiesMixin):
         return ret
 
     def get_username_from_account(self, p_account_name):
-        login_id = execute('cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
-                           '\]" | grep username | awk -F= \'{print $2}\'', shell=True)
+        login_id = execute(r'cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
+                           r'\]" | grep username | awk -F= \'{print $2}\'', shell=True)
         return login_id
 
     def get_account_id_from_account(self, p_account_name):
-        account_id = execute('cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
-                             '\]" | grep account | awk -F= \'{print $2}\'', shell=True)
+        account_id = execute(r'cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
+                             r'\]" | grep account | awk -F= \'{print $2}\'', shell=True)
         return account_id
 
     def get_mfa(self, mfa_path='aws'):
@@ -56,7 +56,7 @@ class AwsMfaMixin(AwsBackendPropertiesMixin):
         execute('aws configure --profile ' + to_profile +
                 ' set ' + key + ' ' + secret, shell=True)
 
-    def get_mfa_session(self, toke=None):
+    def get_mfa_session(self):
         from_account_name = self.mfa_account_prefix + self.mfa_login_account_suffix
         from_account_id = self.get_account_id_from_account(from_account_name)
         to_account_name = self.mfa_account_prefix + self.account_name


@@ -66,7 +66,7 @@ class AwsRdsPgMixin(DevopsBuild):
         owned_by_wrong_user = self.execute_pg_rds_sql(superuser_name, superuser_password,
                                                       "SELECT count(*) FROM pg_class c, pg_user u WHERE c.relowner = u.usesysid " +
                                                       "and u.usename='" + to_deactivate_user_name + "';")
         if int(owned_by_wrong_user) > 0:
             owned_objects = self.execute_pg_rds_sql(superuser_name, superuser_password,
                                                     "SELECT c.relname FROM pg_class c, pg_user u WHERE c.relowner = u.usesysid " +
@@ -110,8 +110,8 @@ class AwsRdsPgMixin(DevopsBuild):
         alter_stmt = self.execute_pg_rds_sql(superuser_name, superuser_password, alter_objects)
         alter_stmt.strip()
-        if (alter_stmt != ''):
+        if alter_stmt != '':
             print('apply alter statements? \n', alter_stmt)
             proceed = input('\n[y/n] \n')
-            if(proceed == 'y'):
+            if proceed == 'y':
                 self.execute_pg_rds_sql(superuser_name, superuser_password, alter_stmt)


@@ -0,0 +1,66 @@
+from os import chmod
+from .python_util import execute
+from .devops_build import DevopsBuild
+from .credential import gopass_field_from_path, gopass_password_from_path
+
+
+def add_c4k_mixin_config(config,
+                         c4k_module_name,
+                         c4k_config_dict,
+                         c4k_auth_dict,
+                         grafana_cloud_user=None,
+                         grafana_cloud_password=None,
+                         grafana_cloud_url='https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push'):
+    if not grafana_cloud_user:
+        grafana_cloud_user = gopass_field_from_path('server/meissa/grafana-cloud', 'grafana-cloud-user')
+    if not grafana_cloud_password:
+        grafana_cloud_password = gopass_password_from_path('server/meissa/grafana-cloud')
+    c4k_auth_dict.update({'mon-auth': {
+        'grafana-cloud-user': grafana_cloud_user,
+        'grafana-cloud-password': grafana_cloud_password
+    }})
+    c4k_config_dict.update({'mon-config': {
+        'grafana-cloud-url': grafana_cloud_url
+    }})
+    config.update({'C4kMixin': {'Config': c4k_config_dict,
+                                'Auth': c4k_auth_dict,
+                                'Name': c4k_module_name}})
+    return config
+
+
+class C4kMixin(DevopsBuild):
+
+    def __init__(self, project, config):
+        super().__init__(project, config)
+        self.c4k_mixin_config = config['C4kMixin']['Config']
+        self.c4k_mixin_auth = config['C4kMixin']['Auth']
+        self.c4k_module_name = config['C4kMixin']['Name']
+        self.c4k_mixin_config.update({'mon-config': {
+            'cluster-name': f':{self.c4k_module_name}',
+            'cluster-stage': f':{self.stage}'}})
+
+    def __generate_clojure_map(self, template_dict):
+        clojure_map_str = '{'
+        for key, value in template_dict.items():
+            if isinstance(value, dict):
+                clojure_map_str += f':{key} {self.__generate_clojure_map(value)}\n'
+            else:
+                clojure_map_str += f':{key} "{value}"\n'
+        clojure_map_str += '}'
+        return clojure_map_str
+
+    def write_c4k_config(self):
+        fqdn = self.get('fqdn')
+        self.c4k_mixin_config.update({'fqdn':fqdn})
+        with open(self.build_path() + '/out_config.edn', 'w', encoding="utf-8") as output_file:
+            output_file.write(self.__generate_clojure_map(self.c4k_mixin_config))
+
+    def write_c4k_auth(self):
+        with open(self.build_path() + '/out_auth.edn', 'w', encoding="utf-8") as output_file:
+            output_file.write(self.__generate_clojure_map(self.c4k_mixin_auth))
+        chmod(self.build_path() + '/out_auth.edn', 0o600)
+
+    def c4k_apply(self):
+        cmd = f'c4k-{self.c4k_module_name}-standalone.jar {self.build_path()}/out_config.edn {self.build_path()}/out_auth.edn > {self.build_path()}/out_{self.c4k_module_name}.yaml'
+        output = execute(cmd, True)
+        print(output)
+        return output
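
For orientation (not part of this merge request): a minimal sketch of how a consuming project's build.py could wire the new mixin, mirroring the test added at the end of this diff. The task name, fqdn and the contents of the config/auth maps are illustrative; passing the Grafana credentials explicitly skips the gopass lookup that is used by default.

from pybuilder.core import init, task
from ddadevops import C4kMixin, add_c4k_mixin_config

@init
def initialize(project):
    config = {'stage': 'test',
              'project_root_path': '.',
              'module': 'c4k-test',
              'build_dir_name': 'target'}
    # explicit credentials avoid the default gopass lookup
    add_c4k_mixin_config(config, 'c4k-test',
                         {'issuer': 'staging'},            # illustrative c4k config map
                         {'postgres-db-password': 'pw'},   # illustrative c4k auth map
                         grafana_cloud_user='user',
                         grafana_cloud_password='password')
    project.set_property('c4k_build', C4kMixin(project, config))

@task
def generate_c4k_resources(project):
    build = project.get_property('c4k_build')
    build.initialize_build_dir()
    build.put('fqdn', 'example.org')   # consumed by write_c4k_config()
    build.write_c4k_config()           # writes <build_path>/out_config.edn
    build.write_c4k_auth()             # writes <build_path>/out_auth.edn, chmod 600

A later task could then call build.c4k_apply() to render the module's yaml via the c4k-<module>-standalone.jar.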


@@ -1,17 +1,4 @@
-from .python_util import *
-import deprecation
-
-
-@deprecation.deprecated(deprecated_in="0.5.0", removed_in="1.0",
-                        details='use gopass_password_from_path(os.environ.get(env_var, None)) instead')
-def gopass_credential_from_env_path (env_var):
-    env_path = os.environ.get(env_var, None)
-    return gopass_password_from_path(env_path)
-
-
-@deprecation.deprecated(deprecated_in="0.5.0", removed_in="1.0",
-                        details='use gopass_password_from_path(path) instead')
-def gopass_credential_from_path (path):
-    return gopass_password_from_path(path)
+from .python_util import execute
 
 def gopass_field_from_path (path, field):
     credential = None
@@ -26,4 +13,3 @@ def gopass_password_from_path (path):
     print('get password for: ' + path)
     credential = execute(['gopass', 'show', '--password', path])
     return credential


@@ -1,7 +1,6 @@
-from subprocess import run
+from subprocess import run, CalledProcessError
 from .python_util import filter_none
 
 
 def create_devops_build_config(stage, project_root_path, module,
                                build_dir_name='target'):
     return {'stage': stage,
@@ -13,11 +12,12 @@ def get_devops_build(project):
     return project.get_property('devops_build')
 
 def get_tag_from_latest_commit():
-    value = run('git describe --abbrev=0 --tags --exact-match', shell=True,
-                capture_output=True)
-    if value.returncode != 0:
-        return None
-    return value.stdout.decode('UTF-8').rstrip()
+    try:
+        value = run('git describe --abbrev=0 --tags --exact-match', shell=True,
+                    capture_output=True, check=True)
+        return value.stdout.decode('UTF-8').rstrip()
+    except CalledProcessError:
+        return None
 
 class DevopsBuild:
@@ -42,8 +42,8 @@ class DevopsBuild:
         return '/'.join(filter_none(mylist))
 
     def initialize_build_dir(self):
-        run('rm -rf ' + self.build_path(), shell=True)
-        run('mkdir -p ' + self.build_path(), shell=True)
+        run('rm -rf ' + self.build_path(), shell=True, check=True)
+        run('mkdir -p ' + self.build_path(), shell=True, check=True)
 
     def put(self, key, value):
         self.stack[key] = value


@@ -1,9 +1,8 @@
-from subprocess import run
-from .python_util import filter_none
-from pkg_resources import *
-from .devops_build import DevopsBuild, create_devops_build_config
 import sys
+from subprocess import run
+from pkg_resources import resource_string
+from .python_util import filter_none
+from .devops_build import DevopsBuild, create_devops_build_config
 
 def create_devops_docker_build_config(stage,
                                       project_root_path,
@@ -20,7 +19,7 @@ def create_devops_docker_build_config(stage,
     ret.update({'dockerhub_user': dockerhub_user,
                 'dockerhub_password': dockerhub_password,
                 'use_package_common_files': use_package_common_files,
                 'docker_build_commons_dir_name': docker_build_commons_dir_name,
                 'build_commons_path': build_commons_path,
                 'docker_publish_tag': docker_publish_tag, })
     return ret
@@ -44,10 +43,10 @@ class DevopsDockerBuild(DevopsBuild):
         return '/'.join(filter_none(mylist)) + '/'
 
     def copy_build_resource_file_from_package(self, name):
-        run('mkdir -p ' + self.build_path() + '/image/resources', shell=True)
+        run('mkdir -p ' + self.build_path() + '/image/resources', shell=True, check=True)
         my_data = resource_string(
             __name__, "src/main/resources/docker/" + name)
-        with open(self.build_path() + '/' + name, "w") as output_file:
+        with open(self.build_path() + '/' + name, "w", encoding="utf-8") as output_file:
             output_file.write(my_data.decode(sys.stdout.encoding))
 
     def copy_build_resources_from_package(self):
@@ -56,7 +55,7 @@ class DevopsDockerBuild(DevopsBuild):
     def copy_build_resources_from_dir(self):
         run('cp -f ' + self.docker_build_commons_path() +
-            '* ' + self.build_path(), shell=True)
+            '* ' + self.build_path(), shell=True, check=True)
 
     def initialize_build_dir(self):
         super().initialize_build_dir()
@@ -64,34 +63,34 @@ class DevopsDockerBuild(DevopsBuild):
             self.copy_build_resources_from_package()
         else:
             self.copy_build_resources_from_dir()
-        run('cp -r image ' + self.build_path(), shell=True)
-        run('cp -r test ' + self.build_path(), shell=True)
+        run('cp -r image ' + self.build_path(), shell=True, check=True)
+        run('cp -r test ' + self.build_path(), shell=True, check=True)
 
     def image(self):
         run('docker build -t ' + self.name() +
             ' --file ' + self.build_path() + '/image/Dockerfile '
-            + self.build_path() + '/image', shell=True)
+            + self.build_path() + '/image', shell=True, check=True)
 
     def drun(self):
         run('docker run --expose 8080 -it --entrypoint="" ' +
-            self.name() + ' /bin/bash', shell=True)
+            self.name() + ' /bin/bash', shell=True, check=True)
 
     def dockerhub_login(self):
         run('docker login --username ' + self.dockerhub_user +
-            ' --password ' + self.dockerhub_password, shell=True)
+            ' --password ' + self.dockerhub_password, shell=True, check=True)
 
     def dockerhub_publish(self):
-        if(self.docker_publish_tag is not None):
+        if self.docker_publish_tag is not None:
             run('docker tag ' + self.name() + ' ' + self.dockerhub_user +
-                '/' + self.name() + ':' + self.docker_publish_tag, shell=True)
+                '/' + self.name() + ':' + self.docker_publish_tag, shell=True, check=True)
             run('docker push ' + self.dockerhub_user +
-                '/' + self.name() + ':' + self.docker_publish_tag, shell=True)
+                '/' + self.name() + ':' + self.docker_publish_tag, shell=True, check=True)
         run('docker tag ' + self.name() + ' ' + self.dockerhub_user +
-            '/' + self.name() + ':latest', shell=True)
+            '/' + self.name() + ':latest', shell=True, check=True)
         run('docker push ' + self.dockerhub_user +
-            '/' + self.name() + ':latest', shell=True)
+            '/' + self.name() + ':latest', shell=True, check=True)
 
     def test(self):
         run('docker build -t ' + self.name() + '-test ' +
             '--file ' + self.build_path() + '/test/Dockerfile '
-            + self.build_path() + '/test', shell=True)
+            + self.build_path() + '/test', shell=True, check=True)


@@ -1,12 +1,14 @@
-from os import path, chmod
+import sys
+from os import chmod
 from json import load, dumps
 from subprocess import run
-from pkg_resources import *
-from dda_python_terraform import *
+from packaging import version
+from pkg_resources import resource_string
+from dda_python_terraform import Terraform, IsFlagged
 from .python_util import filter_none
 from .devops_build import DevopsBuild, create_devops_build_config
-from packaging import version
-import sys
@@ -20,10 +22,12 @@ def create_devops_terraform_build_config(stage,
                                          build_commons_path=None,
                                          terraform_build_commons_dir_name='terraform',
                                          debug_print_terraform_command=False,
-                                         additional_tfvar_files=[],
+                                         additional_tfvar_files=None,
                                          terraform_semantic_version="1.0.8"):
     if not output_json_name:
         output_json_name = 'out_' + module + '.json'
+    if not additional_tfvar_files:
+        additional_tfvar_files = []
     ret = create_devops_build_config(
         stage, project_root_path, module, build_dir_name)
     ret.update({'additional_vars': additional_vars,
@@ -68,7 +72,7 @@ class DevopsTerraformBuild(DevopsBuild):
     def copy_build_resource_file_from_package(self, name):
         my_data = resource_string(
             __name__, "src/main/resources/terraform/" + name)
-        with open(self.build_path() + '/' + name, "w") as output_file:
+        with open(self.build_path() + '/' + name, "w", encoding="utf-8") as output_file:
             output_file.write(my_data.decode(sys.stdout.encoding))
 
     def copy_build_resources_from_package(self):
@@ -77,13 +81,13 @@ class DevopsTerraformBuild(DevopsBuild):
     def copy_build_resources_from_dir(self):
         run('cp -f ' + self.terraform_build_commons_path() +
-            '* ' + self.build_path(), shell=True)
+            '* ' + self.build_path(), shell=False, check=False)
 
     def copy_local_state(self):
-        run('cp terraform.tfstate ' + self.build_path(), shell=True)
+        run('cp terraform.tfstate ' + self.build_path(), shell=False, check=False)
 
     def rescue_local_state(self):
-        run('cp ' + self.build_path() + '/terraform.tfstate .', shell=True)
+        run('cp ' + self.build_path() + '/terraform.tfstate .', shell=False, check=False)
 
     def initialize_build_dir(self):
         super().initialize_build_dir()
@@ -92,126 +96,126 @@ class DevopsTerraformBuild(DevopsBuild):
         else:
             self.copy_build_resources_from_dir()
         self.copy_local_state()
-        run('cp *.tf ' + self.build_path(), shell=True)
-        run('cp *.properties ' + self.build_path(), shell=True)
-        run('cp *.tfvars ' + self.build_path(), shell=True)
-        run('cp -r scripts ' + self.build_path(), shell=True)
+        run('cp *.tf ' + self.build_path(), shell=True, check=False)
+        run('cp *.properties ' + self.build_path(), shell=True, check=False)
+        run('cp *.tfvars ' + self.build_path(), shell=True, check=False)
+        run('cp -r scripts ' + self.build_path(), shell=True, check=False)
 
     def post_build(self):
         self.rescue_local_state()
 
     def init_client(self):
-        tf = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
-        tf.init()
-        self.print_terraform_command(tf)
+        terraform = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
+        terraform.init()
+        self.print_terraform_command(terraform)
         if self.use_workspace:
             try:
-                tf.workspace('select', self.stage)
-                self.print_terraform_command(tf)
+                terraform.workspace('select', self.stage)
+                self.print_terraform_command(terraform)
             except:
-                tf.workspace('new', self.stage)
-                self.print_terraform_command(tf)
-        return tf
+                terraform.workspace('new', self.stage)
+                self.print_terraform_command(terraform)
+        return terraform
 
-    def write_output(self, tf):
-        result = tf.output(json=IsFlagged)
-        self.print_terraform_command(tf)
-        with open(self.build_path() + self.output_json_name, "w") as output_file:
+    def write_output(self, terraform):
+        result = terraform.output(json=IsFlagged)
+        self.print_terraform_command(terraform)
+        with open(self.build_path() + self.output_json_name, "w", encoding="utf-8") as output_file:
             output_file.write(dumps(result))
         chmod(self.build_path() + self.output_json_name, 0o600)
 
     def read_output_json(self):
-        with open(self.build_path() + self.output_json_name, 'r') as f:
-            return load(f)
+        with open(self.build_path() + self.output_json_name, 'r', encoding="utf-8") as file:
+            return load(file)
 
     def plan(self):
-        tf = self.init_client()
-        return_code, stdout, stderr = tf.plan(detailed_exitcode=None, capture_output=False, raise_on_error=False,
+        terraform = self.init_client()
+        return_code, _, stderr = terraform.plan(detailed_exitcode=None, capture_output=False, raise_on_error=False,
                                               var=self.project_vars(),
                                               var_file=self.additional_tfvar_files)
         self.post_build()
-        self.print_terraform_command(tf)
-        if (return_code > 0):
+        self.print_terraform_command(terraform)
+        if return_code > 0:
             raise Exception(return_code, "terraform error:", stderr)
 
     def plan_fail_on_diff(self):
-        tf = self.init_client()
-        return_code, stdout, stderr = tf.plan(detailed_exitcode=IsFlagged, capture_output=False, raise_on_error=False,
+        terraform = self.init_client()
+        return_code, _, stderr = terraform.plan(detailed_exitcode=IsFlagged, capture_output=False, raise_on_error=False,
                                               var=self.project_vars(),
                                               var_file=self.additional_tfvar_files)
         self.post_build()
-        self.print_terraform_command(tf)
-        if (return_code != 0 and return_code != 2):
+        self.print_terraform_command(terraform)
+        if return_code not in (0, 2):
             raise Exception(return_code, "terraform error:", stderr)
-        if (return_code == 2):
+        if return_code == 2:
             raise Exception(return_code, "diff in config found:", stderr)
 
     def apply(self, auto_approve=False):
-        tf = self.init_client()
+        terraform = self.init_client()
         if auto_approve:
             auto_approve_flag = IsFlagged
         else:
             auto_approve_flag = None
         if version.parse(self.terraform_semantic_version) >= version.parse("1.0.0"):
-            return_code, stdout, stderr = tf.apply(capture_output=False, raise_on_error=True,
+            return_code, _, stderr = terraform.apply(capture_output=False, raise_on_error=True,
                                                    auto_approve=auto_approve_flag,
                                                    var=self.project_vars(),
                                                    var_file=self.additional_tfvar_files)
         else:
-            return_code, stdout, stderr = tf.apply(capture_output=False, raise_on_error=True,
+            return_code, _, stderr = terraform.apply(capture_output=False, raise_on_error=True,
                                                    skip_plan=auto_approve,
                                                    var=self.project_vars(),
                                                    var_file=self.additional_tfvar_files)
-        self.write_output(tf)
+        self.write_output(terraform)
         self.post_build()
-        self.print_terraform_command(tf)
-        if (return_code > 0):
+        self.print_terraform_command(terraform)
+        if return_code > 0:
             raise Exception(return_code, "terraform error:", stderr)
 
-    def refresh(self, auto_approve=True):
-        tf = self.init_client()
-        return_code, stdout, stderr = tf.refresh(
+    def refresh(self):
+        terraform = self.init_client()
+        return_code, _, stderr = terraform.refresh(
             var=self.project_vars(),
             var_file=self.additional_tfvar_files)
-        self.write_output(tf)
+        self.write_output(terraform)
         self.post_build()
-        self.print_terraform_command(tf)
-        if (return_code > 0):
+        self.print_terraform_command(terraform)
+        if return_code > 0:
             raise Exception(return_code, "terraform error:", stderr)
 
     def destroy(self, auto_approve=False):
-        tf = self.init_client()
+        terraform = self.init_client()
         if auto_approve:
             auto_approve_flag = IsFlagged
         else:
             auto_approve_flag = None
         if version.parse(self.terraform_semantic_version) >= version.parse("1.0.0"):
-            return_code, stdout, stderr = tf.destroy(capture_output=False, raise_on_error=True,
+            return_code, _, stderr = terraform.destroy(capture_output=False, raise_on_error=True,
                                                      auto_approve=auto_approve_flag,
                                                      var=self.project_vars(),
                                                      var_file=self.additional_tfvar_files)
         else:
-            return_code, stdout, stderr = tf.destroy(capture_output=False, raise_on_error=True,
+            return_code, _, stderr = terraform.destroy(capture_output=False, raise_on_error=True,
                                                      force=auto_approve_flag,
                                                      var=self.project_vars(),
                                                      var_file=self.additional_tfvar_files)
         self.post_build()
-        self.print_terraform_command(tf)
-        if (return_code > 0):
+        self.print_terraform_command(terraform)
+        if return_code > 0:
             raise Exception(return_code, "terraform error:", stderr)
 
     def tf_import(self, tf_import_name, tf_import_resource,):
-        tf = self.init_client()
-        return_code, stdout, stderr = tf.import_cmd(tf_import_name, tf_import_resource,
+        terraform = self.init_client()
+        return_code, _, stderr = terraform.import_cmd(tf_import_name, tf_import_resource,
                                                     capture_output=False, raise_on_error=True,
                                                     var=self.project_vars(),
                                                     var_file=self.additional_tfvar_files)
         self.post_build()
-        self.print_terraform_command(tf)
-        if (return_code > 0):
+        self.print_terraform_command(terraform)
+        if return_code > 0:
             raise Exception(return_code, "terraform error:", stderr)
 
-    def print_terraform_command(self, tf):
+    def print_terraform_command(self, terraform):
         if self.debug_print_terraform_command:
-            output = 'cd ' + self.build_path() + ' && ' + tf.latest_cmd()
+            output = 'cd ' + self.build_path() + ' && ' + terraform.latest_cmd()
             print(output)


@@ -1,7 +1,6 @@
-from dda_python_terraform import *
+from dda_python_terraform import Terraform
 from .digitalocean_terraform_build import DigitaloceanTerraformBuild
 
 
 def add_digitalocean_backend_properties_mixin_config(config,
                                                      account_name,
                                                      endpoint,
@@ -59,15 +58,15 @@ class DigitaloceanBackendPropertiesMixin(DigitaloceanTerraformBuild):
         pass
 
     def init_client(self):
-        tf = Terraform(working_dir=self.build_path(),
+        terraform = Terraform(working_dir=self.build_path(),
                        terraform_semantic_version=self.terraform_semantic_version)
-        tf.init(backend_config=self.backend_config)
-        self.print_terraform_command(tf)
+        terraform.init(backend_config=self.backend_config)
+        self.print_terraform_command(terraform)
         if self.use_workspace:
             try:
-                tf.workspace('select', self.stage)
-                self.print_terraform_command(tf)
+                terraform.workspace('select', self.stage)
+                self.print_terraform_command(terraform)
             except:
-                tf.workspace('new', self.stage)
-                self.print_terraform_command(tf)
-        return tf
+                terraform.workspace('new', self.stage)
+                self.print_terraform_command(terraform)
+        return terraform


@@ -15,9 +15,11 @@ def create_digitalocean_terraform_build_config(stage,
                                                build_commons_path=None,
                                                terraform_build_commons_dir_name='terraform',
                                                debug_print_terraform_command=False,
-                                               additional_tfvar_files=[],
+                                               additional_tfvar_files=None,
                                                terraform_semantic_version="1.0.8",
                                                ):
+    if not additional_tfvar_files:
+        additional_tfvar_files = []
     config = create_devops_terraform_build_config(stage,
                                                   project_root_path,
                                                   module,


@@ -1,22 +1,21 @@
 from string import Template
-from subprocess import run
-from .python_util import *
+from .python_util import execute_live
 from .devops_build import DevopsBuild
 
-config_base = """
+CONFIG_BASE = """
 fqdn: $fqdn
 """
-config_ipv4 = """node:
+CONFIG_IPV4 = """node:
   ipv4: $ipv4
 """
-config_ipv6 = """  ipv6: $ipv6
+CONFIG_IPV6 = """  ipv6: $ipv6
 """
-config_certmanager = """certmanager:
+CONFIG_CERTMANAGER = """certmanager:
   email: $letsencrypt_email
   letsencryptEndpoint: $letsencrypt_endpoint
 """
-config_echo = """echo: $echo
+CONFIG_ECHO = """echo: $echo
 """
@@ -31,16 +30,16 @@ def add_provs_k3s_mixin_config(config,
                                ipv6=None,
                                app_filename_to_provision=None):
     template_text = k3s_config_template
-    if(template_text == None):
-        template_text = config_base
-    if(letsencrypt_endpoint != None):
-        template_text += config_certmanager
-    if(echo != None):
-        template_text += config_echo
-    if(ipv4 != None):
-        template_text += config_ipv4
-    if(ipv6 != None):
-        template_text += config_ipv6
+    if template_text is None:
+        template_text = CONFIG_BASE
+    if letsencrypt_endpoint is not None:
+        template_text += CONFIG_CERTMANAGER
+    if echo is not None:
+        template_text += CONFIG_ECHO
+    if ipv4 is not None:
+        template_text += CONFIG_IPV4
+    if ipv6 is not None:
+        template_text += CONFIG_IPV6
     config.update({'ProvsK3sMixin':
                    {'fqdn': fqdn,
@@ -89,10 +88,10 @@ class ProvsK3sMixin(DevopsBuild):
         self.ipv6 = ipv6
         self.put('ipv6', ipv6)
         template_text = self.k3s_config_template_text
-        if(ipv4 != None):
-            template_text += config_ipv4
-        if(ipv6 != None):
-            template_text += config_ipv6
+        if ipv4 is not None:
+            template_text += CONFIG_IPV4
+        if ipv6 is not None:
+            template_text += CONFIG_IPV6
         self.k3s_config_template_text = template_text
         self.put('k3s_config_template_text', template_text)
         template = Template(template_text)
@@ -102,18 +101,14 @@ class ProvsK3sMixin(DevopsBuild):
     def write_provs_config(self):
         substitutes = self.get_keys(['fqdn', 'ipv4', 'ipv6', 'letsencrypt_email',
                                      'letsencrypt_endpoint', 'echo'])
-        with open(self.build_path() + '/out_k3sServerConfig.yaml', "w") as output_file:
+        with open(self.build_path() + '/out_k3sServerConfig.yaml', "w", encoding="utf-8") as output_file:
             output_file.write(self.k3s_config_template.substitute(substitutes))
 
     def provs_server(self, dry_run=False):
-        result = ''
         cmd = ['provs-server.jar', 'k3s', self.provision_user + '@' + self.fqdn, '-c',
                self.build_path() + '/out_k3sServerConfig.yaml',
                '-a', self.build_path() + '/' + self.app_filename_to_provision]
-        prn_cmd = list(cmd)
-        print(" ".join(prn_cmd))
-        if (not dry_run):
-            result = execute(cmd)
-            print(result)
-        return result
+        if dry_run:
+            print(" ".join(cmd))
+        else:
+            execute_live(cmd)


@@ -1,4 +1,4 @@
-from subprocess import check_output
+from subprocess import check_output, Popen, PIPE
 import sys
 
 def execute(cmd, shell=False):
@@ -8,5 +8,12 @@ def execute(cmd, shell=False):
         output = check_output(cmd, shell=shell)
     return output.rstrip()
 
-def filter_none(list):
-    return [x for x in list if x is not None]
+def execute_live(cmd):
+    process = Popen(cmd, stdout=PIPE)
+    for line in iter(process.stdout.readline, b''):
+        print(line.decode('utf-8'), end='')
+    process.stdout.close()
+    process.wait()
+
+def filter_none(list_to_filter):
+    return [x for x in list_to_filter if x is not None]
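
An illustrative call to the new execute_live helper (the command shown is arbitrary): unlike execute(), which collects and returns the output, it streams each line as soon as it is produced, which is what provs_server() above now relies on for long provisioning runs.

from src.main.python.ddadevops.python_util import execute_live

# prints every output line immediately instead of buffering it
execute_live(['ping', '-c', '3', 'localhost'])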

src/test/__init__.py (new empty file)


@@ -0,0 +1,48 @@
+import os
+from pybuilder.core import Project
+from src.main.python.ddadevops.c4k_mixin import C4kMixin, add_c4k_mixin_config
+
+
+class MyC4kMixin(C4kMixin):
+    pass
+
+
+def test_c4k_mixin(tmp_path):
+    build_dir = 'build'
+    project_name = 'testing-project'
+    module_name = 'c4k-test'
+    tmp_path_str = str(tmp_path)
+
+    project = Project(tmp_path_str, name=project_name)
+
+    project_config = {
+        'stage': 'test',
+        'project_root_path': tmp_path_str,
+        'module': module_name,
+        'build_dir_name': build_dir
+    }
+
+    config = {'a': 1, 'b': 2}
+    auth = {'c': 3, 'd': 4}
+
+    add_c4k_mixin_config(project_config, module_name, config, auth, grafana_cloud_user='user', grafana_cloud_password='password')
+
+    assert project_config.get('C4kMixin') is not None
+    assert project_config.get('C4kMixin').get('Name') is module_name
+    assert project_config.get('C4kMixin').get('Config') is config
+    assert project_config.get('C4kMixin').get('Auth') is auth
+
+    mixin = MyC4kMixin(project, project_config)
+    mixin.initialize_build_dir()
+    assert mixin.build_path() == f'{tmp_path_str}/{build_dir}/{project_name}/{module_name}'
+
+    mixin.put('fqdn', 'testing.test')
+
+    mixin.write_c4k_config()
+    assert 'fqdn' in mixin.c4k_mixin_config
+    assert 'mon-config' in mixin.c4k_mixin_config
+    assert os.path.exists(f'{mixin.build_path()}/out_config.edn')
+
+    mixin.write_c4k_auth()
+    assert 'mon-auth' in mixin.c4k_mixin_auth
+    assert os.path.exists(f'{mixin.build_path()}/out_auth.edn')