add c4k-mixin
parent 1b9e7f20fd
commit b38b0f1f87
21 changed files with 289 additions and 180 deletions
@@ -6,31 +6,34 @@ before_script:
- pip install -r requirements.txt

stages:
- lint
- lint&test
- upload
- image

flake8:
stage: lint
allow_failure: true
stage: lint&test
script:
- pip install -r dev_requirements.txt
- flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 --show-source --statistics src/main/python/ddadevops/*.py
- flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics src/main/python/ddadevops/*.py

mypy:
stage: lint
allow_failure: true
stage: lint&test
script:
- pip install -r dev_requirements.txt
- python -m mypy src/main/python/ddadevops/*.py --ignore-missing-imports

pylint:
stage: lint
allow_failure: true
stage: lint&test
script:
- pip install -r dev_requirements.txt
- pylint -d C0301 src/main/python/ddadevops/*.py
- pylint -d C0301,W0614,R0201,C0114,C0115,C0116,similarities,W0702,W0702,R0913,R0902,R0914,R1732 src/main/python/ddadevops/*.py

pytest:
stage: lint&test
script:
- pip install -r dev_requirements.txt
- pytest

pypi-stable:
stage: upload
2 build.py
@@ -28,7 +28,7 @@ use_plugin("python.distutils")
default_task = "publish"

name = "ddadevops"
version = "3.0.2"
version = "3.0.18"
summary = "tools to support builds combining gopass, terraform, dda-pallet, aws & hetzner-cloud"
description = __doc__
authors = [Author("meissa GmbH", "buero@meissa-gmbh.de")]
@@ -9,3 +9,4 @@ pylint==2.15.8
pytest==7.2
pytest-cov==4.0
pytest-datafiles==2.0
types-setuptools==65.6.0.2
0 src/__init__.py Normal file
0 src/main/__init__.py Normal file
0 src/main/python/__init__.py Normal file
@@ -9,6 +9,7 @@ from .provs_k3s_mixin import ProvsK3sMixin, add_provs_k3s_mixin_config
from .aws_rds_pg_mixin import AwsRdsPgMixin, add_aws_rds_pg_mixin_config
from .aws_mfa_mixin import AwsMfaMixin, add_aws_mfa_mixin_config
from .aws_backend_properties_mixin import AwsBackendPropertiesMixin, add_aws_backend_properties_mixin_config
from .c4k_mixin import C4kMixin, add_c4k_mixin_config
from .exoscale_mixin import ExoscaleMixin, add_exoscale_mixin_config
from .digitalocean_backend_properties_mixin import DigitaloceanBackendPropertiesMixin, add_digitalocean_backend_properties_mixin_config
from .digitalocean_terraform_build import DigitaloceanTerraformBuild, create_digitalocean_terraform_build_config
@@ -16,6 +17,6 @@ from .hetzner_mixin import HetznerMixin, add_hetzner_mixin_config
from .devops_docker_build import DevopsDockerBuild, create_devops_docker_build_config
from .devops_terraform_build import DevopsTerraformBuild, create_devops_terraform_build_config
from .devops_build import DevopsBuild, create_devops_build_config, get_devops_build, get_tag_from_latest_commit
from .credential import gopass_credential_from_env_path, gopass_credential_from_path, gopass_password_from_path, gopass_field_from_path
from .credential import gopass_password_from_path, gopass_field_from_path

__version__ = "${version}"
@@ -1,13 +1,11 @@
from dda_python_terraform import *
from dda_python_terraform import Terraform
from .devops_terraform_build import DevopsTerraformBuild


def add_aws_backend_properties_mixin_config(config, account_name):
config.update({'AwsBackendPropertiesMixin':
{'account_name': account_name}})
return config


class AwsBackendPropertiesMixin(DevopsTerraformBuild):

def __init__(self, project, config):
@@ -38,14 +36,14 @@ class AwsBackendPropertiesMixin(DevopsTerraformBuild):
pass

def init_client(self):
tf = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
tf.init(backend_config=self.backend_config)
self.print_terraform_command(tf)
terraform = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
terraform.init(backend_config=self.backend_config)
self.print_terraform_command(terraform)
if self.use_workspace:
try:
tf.workspace('select', self.stage)
self.print_terraform_command(tf)
terraform.workspace('select', self.stage)
self.print_terraform_command(terraform)
except:
tf.workspace('new', self.stage)
self.print_terraform_command(tf)
return tf
terraform.workspace('new', self.stage)
self.print_terraform_command(terraform)
return terraform
@@ -1,4 +1,4 @@
from boto3 import *
from boto3 import Session
from .python_util import execute
from .aws_backend_properties_mixin import AwsBackendPropertiesMixin

@@ -39,13 +39,13 @@ class AwsMfaMixin(AwsBackendPropertiesMixin):
return ret

def get_username_from_account(self, p_account_name):
login_id = execute('cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
'\]" | grep username | awk -F= \'{print $2}\'', shell=True)
login_id = execute(r'cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
r'\]" | grep username | awk -F= \'{print $2}\'', shell=True)
return login_id

def get_account_id_from_account(self, p_account_name):
account_id = execute('cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
'\]" | grep account | awk -F= \'{print $2}\'', shell=True)
account_id = execute(r'cat ~/.aws/accounts | grep -A 2 "\[' + p_account_name +
r'\]" | grep account | awk -F= \'{print $2}\'', shell=True)
return account_id

def get_mfa(self, mfa_path='aws'):
@@ -56,7 +56,7 @@ class AwsMfaMixin(AwsBackendPropertiesMixin):
execute('aws configure --profile ' + to_profile +
' set ' + key + ' ' + secret, shell=True)

def get_mfa_session(self, toke=None):
def get_mfa_session(self):
from_account_name = self.mfa_account_prefix + self.mfa_login_account_suffix
from_account_id = self.get_account_id_from_account(from_account_name)
to_account_name = self.mfa_account_prefix + self.account_name
@@ -110,8 +110,8 @@ class AwsRdsPgMixin(DevopsBuild):
alter_stmt = self.execute_pg_rds_sql(superuser_name, superuser_password, alter_objects)
alter_stmt.strip()

if (alter_stmt != ''):
if alter_stmt != '':
print('apply alter statements? \n', alter_stmt)
proceed = input('\n[y/n] \n')
if(proceed == 'y'):
if proceed == 'y':
self.execute_pg_rds_sql(superuser_name, superuser_password, alter_stmt)
66 src/main/python/ddadevops/c4k_mixin.py Normal file
@@ -0,0 +1,66 @@
from os import chmod
from .python_util import execute
from .devops_build import DevopsBuild
from .credential import gopass_field_from_path, gopass_password_from_path

def add_c4k_mixin_config(config,
                         c4k_module_name,
                         c4k_config_dict,
                         c4k_auth_dict,
                         grafana_cloud_user=None,
                         grafana_cloud_password=None,
                         grafana_cloud_url='https://prometheus-prod-01-eu-west-0.grafana.net/api/prom/push'):
    if not grafana_cloud_user:
        grafana_cloud_user = gopass_field_from_path('server/meissa/grafana-cloud', 'grafana-cloud-user')
    if not grafana_cloud_password:
        grafana_cloud_password = gopass_password_from_path('server/meissa/grafana-cloud')
    c4k_auth_dict.update({'mon-auth': {
        'grafana-cloud-user': grafana_cloud_user,
        'grafana-cloud-password': grafana_cloud_password
    }})
    c4k_config_dict.update({'mon-config': {
        'grafana-cloud-url': grafana_cloud_url
    }})
    config.update({'C4kMixin': {'Config': c4k_config_dict,
                                'Auth': c4k_auth_dict,
                                'Name': c4k_module_name}})
    return config


class C4kMixin(DevopsBuild):

    def __init__(self, project, config):
        super().__init__(project, config)
        self.c4k_mixin_config = config['C4kMixin']['Config']
        self.c4k_mixin_auth = config['C4kMixin']['Auth']
        self.c4k_module_name = config['C4kMixin']['Name']
        self.c4k_mixin_config.update({'mon-config': {
            'cluster-name': f':{self.c4k_module_name}',
            'cluster-stage': f':{self.stage}'}})

    def __generate_clojure_map(self, template_dict):
        clojure_map_str = '{'
        for key, value in template_dict.items():
            if isinstance(value, dict):
                clojure_map_str += f':{key} {self.__generate_clojure_map(value)}\n'
            else:
                clojure_map_str += f':{key} "{value}"\n'
        clojure_map_str += '}'
        return clojure_map_str

    def write_c4k_config(self):
        fqdn = self.get('fqdn')
        self.c4k_mixin_config.update({'fqdn':fqdn})
        with open(self.build_path() + '/out_config.edn', 'w', encoding="utf-8") as output_file:
            output_file.write(self.__generate_clojure_map(self.c4k_mixin_config))

    def write_c4k_auth(self):
        with open(self.build_path() + '/out_auth.edn', 'w', encoding="utf-8") as output_file:
            output_file.write(self.__generate_clojure_map(self.c4k_mixin_auth))
        chmod(self.build_path() + '/out_auth.edn', 0o600)

    def c4k_apply(self):
        cmd = f'c4k-{self.c4k_module_name}-standalone.jar {self.build_path()}/out_config.edn {self.build_path()}/out_auth.edn > {self.build_path()}/out_{self.c4k_module_name}.yaml'
        output = execute(cmd, True)
        print(output)
        return output
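A minimal usage sketch for the new mixin, modeled on src/test/test_c4k_mixin.py further down in this commit; the module name, config and auth values below are placeholders, and c4k_apply() expects a c4k-<module>-standalone.jar on the PATH:

from ddadevops import C4kMixin, add_c4k_mixin_config, create_devops_build_config

config = create_devops_build_config(stage='test', project_root_path='.', module='c4k-test')
config = add_c4k_mixin_config(config,
                              'c4k-test',
                              {'some-config-key': 'value'},    # example c4k config
                              {'some-auth-key': 'secret'},     # example c4k auth
                              grafana_cloud_user='user',
                              grafana_cloud_password='password')

class MyBuild(C4kMixin):
    pass

def deploy(project):
    # project is supplied by pybuilder when this is called from a task
    build = MyBuild(project, config)
    build.initialize_build_dir()
    build.put('fqdn', 'example.test')
    build.write_c4k_config()
    build.write_c4k_auth()
    build.c4k_apply()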
@@ -1,17 +1,4 @@
from .python_util import *
import deprecation

@deprecation.deprecated(deprecated_in="0.5.0", removed_in="1.0",
details='use gopass_password_from_path(os.environ.get(env_var, None)) instead')
def gopass_credential_from_env_path (env_var):
env_path = os.environ.get(env_var, None)
return gopass_password_from_path(env_path)

@deprecation.deprecated(deprecated_in="0.5.0", removed_in="1.0",
details='use gopass_password_from_path(path) instead')
def gopass_credential_from_path (path):
return gopass_password_from_path(path)

from .python_util import execute

def gopass_field_from_path (path, field):
credential = None
@@ -26,4 +13,3 @@ def gopass_password_from_path (path):
print('get password for: ' + path)
credential = execute(['gopass', 'show', '--password', path])
return credential
@@ -1,7 +1,6 @@
from subprocess import run
from subprocess import run, CalledProcessError
from .python_util import filter_none


def create_devops_build_config(stage, project_root_path, module,
build_dir_name='target'):
return {'stage': stage,
@@ -13,11 +12,12 @@ def get_devops_build(project):
return project.get_property('devops_build')

def get_tag_from_latest_commit():
value = run('git describe --abbrev=0 --tags --exact-match', shell=True,
capture_output=True)
if value.returncode != 0:
try:
value = run('git describe --abbrev=0 --tags --exact-match', shell=True,
capture_output=True, check=True)
return value.stdout.decode('UTF-8').rstrip()
except CalledProcessError:
return None
return value.stdout.decode('UTF-8').rstrip()

class DevopsBuild:

@@ -42,8 +42,8 @@ class DevopsBuild:
return '/'.join(filter_none(mylist))

def initialize_build_dir(self):
run('rm -rf ' + self.build_path(), shell=True)
run('mkdir -p ' + self.build_path(), shell=True)
run('rm -rf ' + self.build_path(), shell=True, check=True)
run('mkdir -p ' + self.build_path(), shell=True, check=True)

def put(self, key, value):
self.stack[key] = value
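With this rework, get_tag_from_latest_commit returns the exact-match tag of HEAD or None; a hedged caller sketch (the publish decision below is illustrative only):

from ddadevops import get_tag_from_latest_commit

tag = get_tag_from_latest_commit()   # e.g. '3.0.18', or None when HEAD is untagged
if tag is not None:
    print('publishing tag ' + tag)
else:
    print('commit is untagged, skipping publish')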
@@ -1,9 +1,8 @@
from subprocess import run
from .python_util import filter_none
from pkg_resources import *
from .devops_build import DevopsBuild, create_devops_build_config
import sys

from subprocess import run
from pkg_resources import resource_string
from .python_util import filter_none
from .devops_build import DevopsBuild, create_devops_build_config

def create_devops_docker_build_config(stage,
project_root_path,
@@ -44,10 +43,10 @@ class DevopsDockerBuild(DevopsBuild):
return '/'.join(filter_none(mylist)) + '/'

def copy_build_resource_file_from_package(self, name):
run('mkdir -p ' + self.build_path() + '/image/resources', shell=True)
run('mkdir -p ' + self.build_path() + '/image/resources', shell=True, check=True)
my_data = resource_string(
__name__, "src/main/resources/docker/" + name)
with open(self.build_path() + '/' + name, "w") as output_file:
with open(self.build_path() + '/' + name, "w", encoding="utf-8") as output_file:
output_file.write(my_data.decode(sys.stdout.encoding))

def copy_build_resources_from_package(self):
@@ -56,7 +55,7 @@ class DevopsDockerBuild(DevopsBuild):

def copy_build_resources_from_dir(self):
run('cp -f ' + self.docker_build_commons_path() +
'* ' + self.build_path(), shell=True)
'* ' + self.build_path(), shell=True, check=True)

def initialize_build_dir(self):
super().initialize_build_dir()
@@ -64,34 +63,34 @@
self.copy_build_resources_from_package()
else:
self.copy_build_resources_from_dir()
run('cp -r image ' + self.build_path(), shell=True)
run('cp -r test ' + self.build_path(), shell=True)
run('cp -r image ' + self.build_path(), shell=True, check=True)
run('cp -r test ' + self.build_path(), shell=True, check=True)

def image(self):
run('docker build -t ' + self.name() +
' --file ' + self.build_path() + '/image/Dockerfile '
+ self.build_path() + '/image', shell=True)
+ self.build_path() + '/image', shell=True, check=True)

def drun(self):
run('docker run --expose 8080 -it --entrypoint="" ' +
self.name() + ' /bin/bash', shell=True)
self.name() + ' /bin/bash', shell=True, check=True)

def dockerhub_login(self):
run('docker login --username ' + self.dockerhub_user +
' --password ' + self.dockerhub_password, shell=True)
' --password ' + self.dockerhub_password, shell=True, check=True)

def dockerhub_publish(self):
if(self.docker_publish_tag is not None):
if self.docker_publish_tag is not None:
run('docker tag ' + self.name() + ' ' + self.dockerhub_user +
'/' + self.name() + ':' + self.docker_publish_tag, shell=True)
'/' + self.name() + ':' + self.docker_publish_tag, shell=True, check=True)
run('docker push ' + self.dockerhub_user +
'/' + self.name() + ':' + self.docker_publish_tag, shell=True)
'/' + self.name() + ':' + self.docker_publish_tag, shell=True, check=True)
run('docker tag ' + self.name() + ' ' + self.dockerhub_user +
'/' + self.name() + ':latest', shell=True)
'/' + self.name() + ':latest', shell=True, check=True)
run('docker push ' + self.dockerhub_user +
'/' + self.name() + ':latest', shell=True)
'/' + self.name() + ':latest', shell=True, check=True)

def test(self):
run('docker build -t ' + self.name() + '-test ' +
'--file ' + self.build_path() + '/test/Dockerfile '
+ self.build_path() + '/test', shell=True)
+ self.build_path() + '/test', shell=True, check=True)
@@ -1,12 +1,14 @@
from os import path, chmod
import sys
from os import chmod
from json import load, dumps
from subprocess import run
from pkg_resources import *
from dda_python_terraform import *
from packaging import version
from pkg_resources import resource_string
from dda_python_terraform import Terraform, IsFlagged
from .python_util import filter_none
from .devops_build import DevopsBuild, create_devops_build_config
from packaging import version
import sys


def create_devops_terraform_build_config(stage,
@@ -20,10 +22,12 @@ def create_devops_terraform_build_config(
build_commons_path=None,
terraform_build_commons_dir_name='terraform',
debug_print_terraform_command=False,
additional_tfvar_files=[],
additional_tfvar_files=None,
terraform_semantic_version="1.0.8"):
if not output_json_name:
output_json_name = 'out_' + module + '.json'
if not additional_tfvar_files:
additional_tfvar_files = []
ret = create_devops_build_config(
stage, project_root_path, module, build_dir_name)
ret.update({'additional_vars': additional_vars,
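The default for additional_tfvar_files changes here from a list literal to None with a fallback; a short illustration of the Python pitfall this avoids (function names below are illustrative, not part of the commit):

def broken(files=[]):        # the default list is created once and shared across calls
    files.append('extra.tfvars')
    return files

def fixed(files=None):       # a fresh list per call
    if not files:
        files = []
    files.append('extra.tfvars')
    return files

broken(); broken()           # second call returns ['extra.tfvars', 'extra.tfvars']
fixed(); fixed()             # each call returns ['extra.tfvars']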
@@ -68,7 +72,7 @@ class DevopsTerraformBuild(DevopsBuild):
def copy_build_resource_file_from_package(self, name):
my_data = resource_string(
__name__, "src/main/resources/terraform/" + name)
with open(self.build_path() + '/' + name, "w") as output_file:
with open(self.build_path() + '/' + name, "w", encoding="utf-8") as output_file:
output_file.write(my_data.decode(sys.stdout.encoding))

def copy_build_resources_from_package(self):
@@ -77,13 +81,13 @@ class DevopsTerraformBuild(DevopsBuild):

def copy_build_resources_from_dir(self):
run('cp -f ' + self.terraform_build_commons_path() +
'* ' + self.build_path(), shell=True)
'* ' + self.build_path(), shell=False, check=False)

def copy_local_state(self):
run('cp terraform.tfstate ' + self.build_path(), shell=True)
run('cp terraform.tfstate ' + self.build_path(), shell=False, check=False)

def rescue_local_state(self):
run('cp ' + self.build_path() + '/terraform.tfstate .', shell=True)
run('cp ' + self.build_path() + '/terraform.tfstate .', shell=False, check=False)

def initialize_build_dir(self):
super().initialize_build_dir()
@@ -92,126 +96,126 @@ class DevopsTerraformBuild(DevopsBuild):
else:
self.copy_build_resources_from_dir()
self.copy_local_state()
run('cp *.tf ' + self.build_path(), shell=True)
run('cp *.properties ' + self.build_path(), shell=True)
run('cp *.tfvars ' + self.build_path(), shell=True)
run('cp -r scripts ' + self.build_path(), shell=True)
run('cp *.tf ' + self.build_path(), shell=True, check=False)
run('cp *.properties ' + self.build_path(), shell=True, check=False)
run('cp *.tfvars ' + self.build_path(), shell=True, check=False)
run('cp -r scripts ' + self.build_path(), shell=True, check=False)

def post_build(self):
self.rescue_local_state()

def init_client(self):
tf = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
tf.init()
self.print_terraform_command(tf)
terraform = Terraform(working_dir=self.build_path(), terraform_semantic_version=self.terraform_semantic_version)
terraform.init()
self.print_terraform_command(terraform)
if self.use_workspace:
try:
tf.workspace('select', self.stage)
self.print_terraform_command(tf)
terraform.workspace('select', self.stage)
self.print_terraform_command(terraform)
except:
tf.workspace('new', self.stage)
self.print_terraform_command(tf)
return tf
terraform.workspace('new', self.stage)
self.print_terraform_command(terraform)
return terraform

def write_output(self, tf):
result = tf.output(json=IsFlagged)
self.print_terraform_command(tf)
with open(self.build_path() + self.output_json_name, "w") as output_file:
def write_output(self, terraform):
result = terraform.output(json=IsFlagged)
self.print_terraform_command(terraform)
with open(self.build_path() + self.output_json_name, "w", encoding="utf-8") as output_file:
output_file.write(dumps(result))
chmod(self.build_path() + self.output_json_name, 0o600)

def read_output_json(self):
with open(self.build_path() + self.output_json_name, 'r') as f:
return load(f)
with open(self.build_path() + self.output_json_name, 'r', encoding="utf-8") as file:
return load(file)

def plan(self):
tf = self.init_client()
return_code, stdout, stderr = tf.plan(detailed_exitcode=None, capture_output=False, raise_on_error=False,
terraform = self.init_client()
return_code, _, stderr = terraform.plan(detailed_exitcode=None, capture_output=False, raise_on_error=False,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.post_build()
self.print_terraform_command(tf)
if (return_code > 0):
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)

def plan_fail_on_diff(self):
tf = self.init_client()
return_code, stdout, stderr = tf.plan(detailed_exitcode=IsFlagged, capture_output=False, raise_on_error=False,
terraform = self.init_client()
return_code, _, stderr = terraform.plan(detailed_exitcode=IsFlagged, capture_output=False, raise_on_error=False,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.post_build()
self.print_terraform_command(tf)
if (return_code != 0 and return_code != 2):
self.print_terraform_command(terraform)
if return_code not in (0, 2):
raise Exception(return_code, "terraform error:", stderr)
if (return_code == 2):
if return_code == 2:
raise Exception(return_code, "diff in config found:", stderr)

def apply(self, auto_approve=False):
tf = self.init_client()
terraform = self.init_client()
if auto_approve:
auto_approve_flag = IsFlagged
else:
auto_approve_flag = None
if version.parse(self.terraform_semantic_version) >= version.parse("1.0.0"):
return_code, stdout, stderr = tf.apply(capture_output=False, raise_on_error=True,
return_code, _, stderr = terraform.apply(capture_output=False, raise_on_error=True,
auto_approve=auto_approve_flag,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
else:
return_code, stdout, stderr = tf.apply(capture_output=False, raise_on_error=True,
return_code, _, stderr = terraform.apply(capture_output=False, raise_on_error=True,
skip_plan=auto_approve,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.write_output(tf)
self.write_output(terraform)
self.post_build()
self.print_terraform_command(tf)
if (return_code > 0):
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)

def refresh(self, auto_approve=True):
tf = self.init_client()
return_code, stdout, stderr = tf.refresh(
def refresh(self):
terraform = self.init_client()
return_code, _, stderr = terraform.refresh(
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.write_output(tf)
self.write_output(terraform)
self.post_build()
self.print_terraform_command(tf)
if (return_code > 0):
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)

def destroy(self, auto_approve=False):
tf = self.init_client()
terraform = self.init_client()
if auto_approve:
auto_approve_flag = IsFlagged
else:
auto_approve_flag = None
if version.parse(self.terraform_semantic_version) >= version.parse("1.0.0"):
return_code, stdout, stderr = tf.destroy(capture_output=False, raise_on_error=True,
return_code, _, stderr = terraform.destroy(capture_output=False, raise_on_error=True,
auto_approve=auto_approve_flag,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
else:
return_code, stdout, stderr = tf.destroy(capture_output=False, raise_on_error=True,
return_code, _, stderr = terraform.destroy(capture_output=False, raise_on_error=True,
force=auto_approve_flag,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.post_build()
self.print_terraform_command(tf)
if (return_code > 0):
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)

def tf_import(self, tf_import_name, tf_import_resource,):
tf = self.init_client()
return_code, stdout, stderr = tf.import_cmd(tf_import_name, tf_import_resource,
terraform = self.init_client()
return_code, _, stderr = terraform.import_cmd(tf_import_name, tf_import_resource,
capture_output=False, raise_on_error=True,
var=self.project_vars(),
var_file=self.additional_tfvar_files)
self.post_build()
self.print_terraform_command(tf)
if (return_code > 0):
self.print_terraform_command(terraform)
if return_code > 0:
raise Exception(return_code, "terraform error:", stderr)

def print_terraform_command(self, tf):
def print_terraform_command(self, terraform):
if self.debug_print_terraform_command:
output = 'cd ' + self.build_path() + ' && ' + tf.latest_cmd()
output = 'cd ' + self.build_path() + ' && ' + terraform.latest_cmd()
print(output)
@@ -1,7 +1,6 @@
from dda_python_terraform import *
from dda_python_terraform import Terraform
from .digitalocean_terraform_build import DigitaloceanTerraformBuild


def add_digitalocean_backend_properties_mixin_config(config,
account_name,
endpoint,
@@ -59,15 +58,15 @@ class DigitaloceanBackendPropertiesMixin(DigitaloceanTerraformBuild):
pass

def init_client(self):
tf = Terraform(working_dir=self.build_path(),
terraform = Terraform(working_dir=self.build_path(),
terraform_semantic_version=self.terraform_semantic_version)
tf.init(backend_config=self.backend_config)
self.print_terraform_command(tf)
terraform.init(backend_config=self.backend_config)
self.print_terraform_command(terraform)
if self.use_workspace:
try:
tf.workspace('select', self.stage)
self.print_terraform_command(tf)
terraform.workspace('select', self.stage)
self.print_terraform_command(terraform)
except:
tf.workspace('new', self.stage)
self.print_terraform_command(tf)
return tf
terraform.workspace('new', self.stage)
self.print_terraform_command(terraform)
return terraform
@@ -15,9 +15,11 @@ def create_digitalocean_terraform_build_config(stage,
build_commons_path=None,
terraform_build_commons_dir_name='terraform',
debug_print_terraform_command=False,
additional_tfvar_files=[],
additional_tfvar_files=None,
terraform_semantic_version="1.0.8",
):
if not additional_tfvar_files:
additional_tfvar_files = []
config = create_devops_terraform_build_config(stage,
project_root_path,
module,
@@ -1,22 +1,21 @@
from string import Template
from subprocess import run
from .python_util import *
from .python_util import execute_live
from .devops_build import DevopsBuild


config_base = """
CONFIG_BASE = """
fqdn: $fqdn
"""
config_ipv4 = """node:
CONFIG_IPV4 = """node:
ipv4: $ipv4
"""
config_ipv6 = """ ipv6: $ipv6
CONFIG_IPV6 = """ ipv6: $ipv6
"""
config_certmanager = """certmanager:
CONFIG_CERTMANAGER = """certmanager:
email: $letsencrypt_email
letsencryptEndpoint: $letsencrypt_endpoint
"""
config_echo = """echo: $echo
CONFIG_ECHO = """echo: $echo
"""


@@ -31,16 +30,16 @@ def add_provs_k3s_mixin_config(config,
ipv6=None,
app_filename_to_provision=None):
template_text = k3s_config_template
if(template_text == None):
template_text = config_base
if(letsencrypt_endpoint != None):
template_text += config_certmanager
if(echo != None):
template_text += config_echo
if(ipv4 != None):
template_text += config_ipv4
if(ipv6 != None):
template_text += config_ipv6
if template_text is None:
template_text = CONFIG_BASE
if letsencrypt_endpoint is not None:
template_text += CONFIG_CERTMANAGER
if echo is not None:
template_text += CONFIG_ECHO
if ipv4 is not None:
template_text += CONFIG_IPV4
if ipv6 is not None:
template_text += CONFIG_IPV6

config.update({'ProvsK3sMixin':
{'fqdn': fqdn,
@@ -89,10 +88,10 @@ class ProvsK3sMixin(DevopsBuild):
self.ipv6 = ipv6
self.put('ipv6', ipv6)
template_text = self.k3s_config_template_text
if(ipv4 != None):
template_text += config_ipv4
if(ipv6 != None):
template_text += config_ipv6
if ipv4 is not None:
template_text += CONFIG_IPV4
if ipv6 is not None:
template_text += CONFIG_IPV6
self.k3s_config_template_text = template_text
self.put('k3s_config_template_text', template_text)
template = Template(template_text)
@@ -102,18 +101,14 @@ class ProvsK3sMixin(DevopsBuild):
def write_provs_config(self):
substitutes = self.get_keys(['fqdn', 'ipv4', 'ipv6', 'letsencrypt_email',
'letsencrypt_endpoint', 'echo'])
with open(self.build_path() + '/out_k3sServerConfig.yaml', "w") as output_file:
with open(self.build_path() + '/out_k3sServerConfig.yaml', "w", encoding="utf-8") as output_file:
output_file.write(self.k3s_config_template.substitute(substitutes))

def provs_server(self, dry_run=False):
result = ''
cmd = ['provs-server.jar', 'k3s', self.provision_user + '@' + self.fqdn, '-c',
self.build_path() + '/out_k3sServerConfig.yaml',
'-a', self.build_path() + '/' + self.app_filename_to_provision]
prn_cmd = list(cmd)
print(" ".join(prn_cmd))
if (not dry_run):
result = execute(cmd)
print(result)

return result
if dry_run:
print(" ".join(cmd))
else:
execute_live(cmd)
@@ -1,4 +1,4 @@
from subprocess import check_output
from subprocess import check_output, Popen, PIPE
import sys

def execute(cmd, shell=False):
@@ -8,5 +8,12 @@ def execute(cmd, shell=False):
output = check_output(cmd, shell=shell)
return output.rstrip()

def filter_none(list):
return [x for x in list if x is not None]
def execute_live(cmd):
process = Popen(cmd, stdout=PIPE)
for line in iter(process.stdout.readline, b''):
print(line.decode('utf-8'), end='')
process.stdout.close()
process.wait()

def filter_none(list_to_filter):
return [x for x in list_to_filter if x is not None]
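The new execute_live helper streams a subprocess's stdout line by line instead of collecting it like execute does; a small usage sketch (the command is only an example):

from ddadevops.python_util import execute_live

# prints each output line as soon as the child process emits it
execute_live(['ping', '-c', '3', 'localhost'])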
0 src/test/__init__.py Normal file
48 src/test/test_c4k_mixin.py Normal file
@@ -0,0 +1,48 @@
import os
from pybuilder.core import Project
from src.main.python.ddadevops.c4k_mixin import C4kMixin, add_c4k_mixin_config

class MyC4kMixin(C4kMixin):
    pass

def test_c4k_mixin(tmp_path):

    build_dir = 'build'
    project_name = 'testing-project'
    module_name = 'c4k-test'
    tmp_path_str = str(tmp_path)

    project = Project(tmp_path_str, name=project_name)

    project_config = {
        'stage': 'test',
        'project_root_path': tmp_path_str,
        'module': module_name,
        'build_dir_name': build_dir
    }

    config = {'a': 1, 'b': 2}
    auth = {'c': 3, 'd': 4}

    add_c4k_mixin_config(project_config, module_name, config, auth, grafana_cloud_user='user', grafana_cloud_password='password')

    assert project_config.get('C4kMixin') is not None
    assert project_config.get('C4kMixin').get('Name') is module_name
    assert project_config.get('C4kMixin').get('Config') is config
    assert project_config.get('C4kMixin').get('Auth') is auth

    mixin = MyC4kMixin(project, project_config)
    mixin.initialize_build_dir()
    assert mixin.build_path() == f'{tmp_path_str}/{build_dir}/{project_name}/{module_name}'

    mixin.put('fqdn', 'testing.test')

    mixin.write_c4k_config()
    assert 'fqdn' in mixin.c4k_mixin_config
    assert 'mon-config' in mixin.c4k_mixin_config
    assert os.path.exists(f'{mixin.build_path()}/out_config.edn')

    mixin.write_c4k_auth()
    assert 'mon-auth' in mixin.c4k_mixin_auth
    assert os.path.exists(f'{mixin.build_path()}/out_auth.edn')