Compare commits
289 Commits
verbose-er
...
main
@ -1,42 +0,0 @@
|
||||
name: stable
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '[0-9]+.[0-9]+.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: stable build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Use python 3.x
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
|
||||
- name: build stable release
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
|
||||
run: |
|
||||
pyb -P version=${{ github.ref }} publish upload
|
||||
|
||||
- name: Create GH Release
|
||||
id: create_release
|
||||
uses: actions/create-release@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ github.ref }}
|
||||
release_name: Release ${{ github.ref }}
|
||||
draft: false
|
||||
prerelease: false
|
||||
|
@ -1,30 +0,0 @@
|
||||
name: unstable
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '![0-9]+.[0-9]+.[0-9]+'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: unstable
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Use python 3.x
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
|
||||
- name: build unstable release
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_DDA }}
|
||||
run: |
|
||||
pyb publish upload
|
@ -1,52 +1,96 @@
|
||||
image: "domaindrivenarchitecture/devops-build:4.0.8"
|
||||
|
||||
before_script:
|
||||
- python --version
|
||||
- python -m pip install --upgrade pip
|
||||
- pip install -r requirements.txt
|
||||
- export IMAGE_TAG=$CI_IMAGE_TAG
|
||||
- export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
|
||||
- export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
|
||||
|
||||
stages:
|
||||
- lint&test
|
||||
- upload
|
||||
- image
|
||||
|
||||
.py: &py
|
||||
image: "domaindrivenarchitecture/ddadevops-python:4.10.7"
|
||||
before_script:
|
||||
- export RELEASE_ARTIFACT_TOKEN=$MEISSA_REPO_BUERO_RW
|
||||
- python --version
|
||||
- pip install -r requirements.txt
|
||||
|
||||
.img: &img
|
||||
image: "domaindrivenarchitecture/ddadevops-dind:4.10.7"
|
||||
services:
|
||||
- docker:dind
|
||||
before_script:
|
||||
- export IMAGE_DOCKERHUB_USER=$DOCKERHUB_USER
|
||||
- export IMAGE_DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD
|
||||
- export IMAGE_TAG=$CI_COMMIT_TAG
|
||||
|
||||
.tag_only: &tag_only
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: never
|
||||
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
|
||||
|
||||
lint:
|
||||
<<: *py
|
||||
stage: lint&test
|
||||
script:
|
||||
- pip install -r dev_requirements.txt
|
||||
- pyb lint
|
||||
|
||||
pytest:
|
||||
<<: *py
|
||||
stage: lint&test
|
||||
script:
|
||||
- pip install -r dev_requirements.txt
|
||||
- pyb test
|
||||
|
||||
pypi-stable:
|
||||
<<: *py
|
||||
<<: *tag_only
|
||||
stage: upload
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: never
|
||||
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
|
||||
script:
|
||||
- pyb -P version=$CI_COMMIT_TAG publish upload
|
||||
- pyb -P version=$CI_COMMIT_TAG publish upload publish_artifacts
|
||||
|
||||
clojure-image-test-publish:
|
||||
clj-cljs-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: never
|
||||
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
|
||||
script:
|
||||
- cd infrastructure/clojure && pyb image test publish
|
||||
- cd infrastructure/clj-cljs && pyb image publish
|
||||
|
||||
devops-build-image-test-publish:
|
||||
clj-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
script:
|
||||
- cd infrastructure/clj && pyb image publish
|
||||
|
||||
python-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
script:
|
||||
- cd infrastructure/python && pyb image publish
|
||||
|
||||
dind-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
script:
|
||||
- cd infrastructure/dind && pyb image publish
|
||||
|
||||
ddadevops-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
script:
|
||||
- cd infrastructure/ddadevops && pyb image publish
|
||||
|
||||
kotlin-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
script:
|
||||
- cd infrastructure/kotlin && pyb image publish
|
||||
|
||||
backup-image-publish:
|
||||
<<: *img
|
||||
<<: *tag_only
|
||||
stage: image
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: never
|
||||
- if: '$CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/'
|
||||
script:
|
||||
- cd infrastructure/devops-build && pyb image test publish
|
||||
- cd infrastructure/backup && pyb image publish
|
||||
|
@ -0,0 +1,33 @@
|
||||
# ddadevops Images
|
||||
## ddadevops-clojure
|
||||
|
||||
Contains
|
||||
* clojure
|
||||
* shadowcljs
|
||||
* lein
|
||||
* java
|
||||
* graalvm
|
||||
* pybuilder, ddadevops
|
||||
|
||||
## ddadevops
|
||||
|
||||
Contains:
|
||||
* pybuilder, ddadevops
|
||||
|
||||
## devops-build
|
||||
|
||||
Image is deprecated.
|
||||
|
||||
## ddadevops-dind
|
||||
|
||||
Contains:
|
||||
* docker in docker
|
||||
* pybuilder, ddadevops
|
||||
|
||||
## ddadevops-python
|
||||
|
||||
Contains:
|
||||
* python 3.10
|
||||
* python linting
|
||||
* python setup-tools
|
||||
* pybuilder, ddadevops
|
@ -1,8 +0,0 @@
|
||||
Adjust the version number in build.py to the release version number.
|
||||
git commit -am "release"
|
||||
git tag -am "release" [release version no]
|
||||
git push --follow-tags
|
||||
increase version no in build.py
|
||||
git commit -am "version bump"
|
||||
git push
|
||||
pip3 install --upgrade ddadevops
|
@ -0,0 +1,51 @@
|
||||
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *
import logging  # NOTE(review): unused in this block; kept because the whole file may not be visible

name = 'dda-backup'
MODULE = 'NOT_SET'
PROJECT_ROOT_PATH = '../..'
version = "4.12.1-dev"


@init
def initialize(project):
    """Configure the ddadevops image build for the dda-backup image.

    Dev versions get a timestamp suffix so repeated dev pushes produce
    distinct image tags.
    """
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    # Renamed from `input` to avoid shadowing the `input` builtin.
    build_config = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_ONLY",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.7.0")

    build = DevopsImageBuild(project, build_config)
    build.initialize_build_dir()


@task
def image(project):
    """Build the container image."""
    build = get_devops_build(project)
    build.image()


@task
def drun(project):
    """Run the built image (docker run wrapper)."""
    build = get_devops_build(project)
    build.drun()


@task
def publish(project):
    """Log in to Docker Hub and push the image."""
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()
@ -0,0 +1,79 @@
|
||||
## Init Statemachine
|
||||
|
||||
### Inputs
|
||||
1. `restic-password: ""`
|
||||
2. `restic-password-to-rotate: ""`
|
||||
|
||||
### Manually initialize the restic repository for the first time
|
||||
|
||||
1. apply backup-and-restore pod:
|
||||
`kubectl scale deployment backup-restore --replicas=1`
|
||||
2. exec into pod and execute restore pod (press tab to get your exact pod name)
|
||||
`kubectl exec -it backup-restore-... -- /usr/local/bin/init.sh`
|
||||
3. remove backup-and-restore pod:
|
||||
`kubectl scale deployment backup-restore --replicas=0`
|
||||
|
||||
### Password Rotation
|
||||
|
||||
1. apply backup-and-restore pod:
|
||||
`kubectl scale deployment backup-restore --replicas=1`
|
||||
2. add new password to restic repository
|
||||
`restic key add ....`
|
||||
=> Trigger ::
|
||||
field (1) credential current
|
||||
field (2) credential new
|
||||
3. replace field (1) with (2) & clear (2)
|
||||
4. remove old key - ???
|
||||
`restic remove ....`
|
||||
|
||||
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> init
|
||||
init --> backup_ready: trigger, restic-password !empty
|
||||
backup_ready --> new_password_added: restic-password !empty && restic-password-to-rotate !empty
|
||||
new_password_added --> backup_ready: restic-password !empty && restic-password-to-rotate empty
|
||||
```
|
||||
|
||||
### First Steps
|
||||
|
||||
1. Cloud Testserver hochfahren
|
||||
2. Dort backup-restore deployment (leeres Secret mgl.?), neues Secret "rotation-credential-secret" als Daten
|
||||
3. mounten von angelegtem Secret in Pod backup-restore
|
||||
4. ba*bash*ka Skript in pod starten -> liest Secret ?leer
|
||||
5. Micha cons.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant k8s
|
||||
participant e as entrypoint.sh
|
||||
participant rm as restic-management.clj
|
||||
|
||||
k8s ->> e: cronjob calls
|
||||
e ->> rm: start-file
|
||||
rm ->> rm: rotate
|
||||
activate rm
|
||||
rm ->> rm: read-backup-repository-state (state)
|
||||
rm ->> rm: read-secret (backup-secret/restic-password, rotation-credential-secret/rotation-credential)
|
||||
rm ->> rm: switch
|
||||
activate rm
|
||||
rm ->> rm: if init && restic-password != null
|
||||
activate rm
|
||||
rm ->> rm: init.sh
|
||||
rm ->> rm: state init -> backup-ready
|
||||
deactivate rm
|
||||
rm ->> rm: if backup-ready && rotation-credential != null
|
||||
activate rm
|
||||
rm ->> rm: add-new-password-to-restic-repository.sh
|
||||
rm ->> rm: state backup-ready -> new-password-added
|
||||
deactivate rm
|
||||
rm ->> rm: if new-password-added && rotation-credential == null
|
||||
activate rm
|
||||
rm ->> rm: remove-old-password-from-restic-repository.sh
|
||||
rm ->> rm: state new-password-added -> backup-ready
|
||||
deactivate rm
|
||||
deactivate rm
|
||||
|
||||
rm ->> rm: store-repository-state (state)
|
||||
deactivate rm
|
||||
```
|
@ -0,0 +1,5 @@
|
||||
FROM ubuntu:jammy
|
||||
|
||||
# install it
|
||||
ADD resources /tmp/
|
||||
RUN /tmp/install.sh
|
@ -0,0 +1,69 @@
|
||||
backup_file_path='files'

# Initialize the restic repository used for plain file backups.
# CERTIFICATE_FILE (optional) is passed as CA cert, e.g. for a REST server
# with a self-signed certificate. All expansions are quoted so paths with
# spaces or unset variables cannot break word splitting (the original left
# them unquoted).
function init-file-repo() {
  if [ -z "${CERTIFICATE_FILE}" ];
  then
    restic -r "${RESTIC_REPOSITORY}/${backup_file_path}" -v init
  else
    restic -r "${RESTIC_REPOSITORY}/${backup_file_path}" -v init --cacert "${CERTIFICATE_FILE}"
  fi
}

# First arg is the directory, second is optional for the path to a certificate file
# Backs up the whole directory, then prunes old snapshots according to
# RESTIC_DAYS_TO_KEEP / RESTIC_MONTHS_TO_KEEP.
function backup-directory() {
  local directory="$1"; shift

  if [ -z "${CERTIFICATE_FILE}" ];
  then
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" unlock --cleanup-cache
    cd "${directory}" && restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" backup .
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" forget --group-by '' --keep-last 1 --keep-daily "${RESTIC_DAYS_TO_KEEP}" --keep-monthly "${RESTIC_MONTHS_TO_KEEP}" --prune
  else
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" unlock --cleanup-cache --cacert "${CERTIFICATE_FILE}"
    cd "${directory}" && restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" backup . --cacert "${CERTIFICATE_FILE}"
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" forget --group-by '' --keep-last 1 --keep-daily "${RESTIC_DAYS_TO_KEEP}" --keep-monthly "${RESTIC_MONTHS_TO_KEEP}" --prune --cacert "${CERTIFICATE_FILE}"
  fi
}

# First arg is the directory, the remaining args are the sub-directories (relative to the first directory) to backup.
# "$@" is now quoted so sub-directory names containing spaces survive
# (the original bare $@ re-split them).
function backup-fs-from-directory() {
  local directory="$1"; shift

  if [ -z "${CERTIFICATE_FILE}" ];
  then
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" unlock --cleanup-cache
    cd "${directory}" && restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" backup "$@"
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" forget --group-by '' --keep-last 1 --keep-daily "${RESTIC_DAYS_TO_KEEP}" --keep-monthly "${RESTIC_MONTHS_TO_KEEP}" --prune
  else
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" unlock --cleanup-cache --cacert "${CERTIFICATE_FILE}"
    cd "${directory}" && restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" backup "$@" --cacert "${CERTIFICATE_FILE}"
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" forget --group-by '' --keep-last 1 --keep-daily "${RESTIC_DAYS_TO_KEEP}" --keep-monthly "${RESTIC_MONTHS_TO_KEEP}" --prune --cacert "${CERTIFICATE_FILE}"
  fi
}

# Restore a snapshot (default: latest) into the given directory.
# WARNING: wipes ${directory}* before restoring, as the original did.
function restore-directory() {
  local directory="$1"; shift
  local snapshot_id="${1:-latest}"; shift

  if [ -z "${CERTIFICATE_FILE}" ];
  then
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" unlock --cleanup-cache
    rm -rf "${directory}"*
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" restore "${snapshot_id}" --target "${directory}"
  else
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" unlock --cleanup-cache --cacert "${CERTIFICATE_FILE}"
    rm -rf "${directory}"*
    restic -v -r "${RESTIC_REPOSITORY}/${backup_file_path}" restore "${snapshot_id}" --target "${directory}" --cacert "${CERTIFICATE_FILE}"
  fi
}

# List snapshots in the file repository.
function list-snapshot-files() {
  if [ -z "${CERTIFICATE_FILE}" ];
  then
    restic -r "${RESTIC_REPOSITORY}/${backup_file_path}" snapshots
  else
    restic -r "${RESTIC_REPOSITORY}/${backup_file_path}" snapshots --cacert "${CERTIFICATE_FILE}"
  fi
}
|
@ -0,0 +1,21 @@
|
||||
# usage: file_env VAR [DEFAULT]
# ie: file_env 'XYZ_DB_PASSWORD' 'example'
# Resolve VAR either directly or from the file named by VAR_FILE
# (Docker secrets style). Setting both is an error; afterwards VAR is
# exported with the resolved value and VAR_FILE is unset.
function file_env() {
  local target="$1"
  local file_var="${target}_FILE"
  local fallback="${2:-}"
  local resolved="$fallback"

  # The direct value and the file-based value are mutually exclusive.
  if [ "${!target:-}" ] && [ "${!file_var:-}" ]; then
    echo >&2 "error: both $target and $file_var are set (but are exclusive)"
    exit 1
  fi

  if [ "${!target:-}" ]; then
    resolved="${!target}"
  elif [ "${!file_var:-}" ]; then
    resolved="$(< "${!file_var}")"
  fi

  export "$target"="$resolved"
  unset "$file_var"
}
|
@ -0,0 +1,36 @@
|
||||
#!/bin/bash

set -exo pipefail

# Download babashka, verify its sha256 checksum and install the `bb`
# binary into /usr/local/bin.
function babashka_install() {
  local bb_version="1.3.189"

  curl -SsLo /tmp/babashka-${bb_version}-linux-amd64.tar.gz https://github.com/babashka/babashka/releases/download/v${bb_version}/babashka-${bb_version}-linux-amd64.tar.gz
  curl -SsLo /tmp/checksum https://github.com/babashka/babashka/releases/download/v${bb_version}/babashka-${bb_version}-linux-amd64.tar.gz.sha256
  # Append the file path so the downloaded digest matches sha256sum's format.
  echo " /tmp/babashka-$bb_version-linux-amd64.tar.gz"|tee -a /tmp/checksum
  sha256sum -c --status /tmp/checksum
  tar -C /tmp -xzf /tmp/babashka-${bb_version}-linux-amd64.tar.gz
  install -m 0700 -o root -g root /tmp/bb /usr/local/bin/
}

# Image build entry point: installs restic + postgres client, adds the
# pgdg apt repo, installs babashka and the backup helper scripts.
# upgradeSystem / cleanupDocker come from install_functions_debian.sh.
function main() {
  {
    upgradeSystem
    apt-get install -qqy ca-certificates curl gnupg postgresql-client-14 restic
    curl -Ss --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg
    sh -c 'echo "deb [signed-by=/etc/apt/trusted.gpg.d/postgresql-common_pgdg_archive_keyring.gpg] https://apt.postgresql.org/pub/repos/apt jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
    upgradeSystem
    babashka_install
  } > /dev/null

  update-ca-certificates

  # Helper libraries are read-only; the clj entry point must be executable.
  install -m 0400 /tmp/functions.sh /usr/local/lib/
  install -m 0400 /tmp/pg-functions.sh /usr/local/lib/
  install -m 0400 /tmp/file-functions.sh /usr/local/lib/
  install -m 0740 /tmp/restic_management.clj /usr/local/bin/

  cleanupDocker
}

source /tmp/install_functions_debian.sh
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
@ -0,0 +1,149 @@
|
||||
backup_pg_role_path='pg-role'
|
||||
backup_pg_database_path='pg-database'
|
||||
|
||||
function init-command() {
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} -v init $@
|
||||
}
|
||||
|
||||
function init-role-repo() {
|
||||
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
init-command
|
||||
else
|
||||
init-command --cacert ${CERTIFICATE_FILE}
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
function init-database-command() {
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} -v init $@
|
||||
}
|
||||
|
||||
function init-database-repo() {
|
||||
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
init-database-command
|
||||
else
|
||||
init-database-command --cacert ${CERTIFICATE_FILE}
|
||||
fi
|
||||
}
|
||||
|
||||
function drop-create-db() {
|
||||
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
|
||||
--no-password -c "DROP DATABASE \"${POSTGRES_DB}\";"
|
||||
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
|
||||
--no-password -c "CREATE DATABASE \"${POSTGRES_DB}\";"
|
||||
}
|
||||
|
||||
function create-pg-pass() {
|
||||
local pg_host=${POSTGRES_HOST:-localhost}
|
||||
|
||||
echo "${pg_host}:${POSTGRES_DB}:${POSTGRES_USER}:${POSTGRES_PASSWORD}" > /root/.pgpass
|
||||
echo "${POSTGRES_HOST}:template1:${POSTGRES_USER}:${POSTGRES_PASSWORD}" >> /root/.pgpass
|
||||
chmod 0600 /root/.pgpass
|
||||
}
|
||||
|
||||
function roles-unlock-command() {
|
||||
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} unlock --cleanup-cache $@
|
||||
}
|
||||
|
||||
function roles-forget-command() {
|
||||
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
|
||||
}
|
||||
|
||||
function backup-roles() {
|
||||
local role_prefix="$1"; shift
|
||||
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
roles-unlock-command
|
||||
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
|
||||
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin
|
||||
roles-forget-command
|
||||
else
|
||||
roles-unlock-command --cacert ${CERTIFICATE_FILE}
|
||||
pg_dumpall -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U${POSTGRES_USER} --no-password --roles-only | \
|
||||
grep ${role_prefix} | restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} backup --stdin --cacert ${CERTIFICATE_FILE}
|
||||
roles-forget-command --cacert ${CERTIFICATE_FILE}
|
||||
fi
|
||||
}
|
||||
|
||||
function db-unlock-command() {
|
||||
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} unlock --cleanup-cache $@
|
||||
}
|
||||
|
||||
function db-forget-command() {
|
||||
restic -v -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} forget --group-by '' --keep-last 1 --keep-daily ${RESTIC_DAYS_TO_KEEP} --keep-monthly ${RESTIC_MONTHS_TO_KEEP} --prune $@
|
||||
}
|
||||
|
||||
function backup-db-dump() {
|
||||
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
db-unlock-command
|
||||
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
|
||||
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin
|
||||
db-forget-command
|
||||
else
|
||||
db-unlock-command --cacert ${CERTIFICATE_FILE}
|
||||
pg_dump -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} \
|
||||
-U ${POSTGRES_USER} --no-password --serializable-deferrable | \
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} backup --stdin --cacert ${CERTIFICATE_FILE}
|
||||
db-forget-command --cacert ${CERTIFICATE_FILE}
|
||||
fi
|
||||
}
|
||||
|
||||
function restore-roles() {
|
||||
local snapshot_id="${1:-latest}"; shift
|
||||
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
roles-unlock-command
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin | \
|
||||
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
|
||||
--no-password
|
||||
else
|
||||
roles-unlock-command --cacert ${CERTIFICATE_FILE}
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_role_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
|
||||
psql -d template1 -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
|
||||
--no-password
|
||||
fi
|
||||
}
|
||||
|
||||
function restore-db() {
|
||||
local snapshot_id="${1:-latest}"; shift
|
||||
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
db-unlock-command
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin | \
|
||||
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
|
||||
--no-password
|
||||
else
|
||||
db-unlock-command --cacert ${CERTIFICATE_FILE}
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} dump ${snapshot_id} stdin --cacert ${CERTIFICATE_FILE} | \
|
||||
psql -d ${POSTGRES_DB} -h ${POSTGRES_SERVICE} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} \
|
||||
--no-password
|
||||
fi
|
||||
}
|
||||
|
||||
# List snapshots in the ROLE repository.
# BUGFIX: the --cacert branch previously queried
# ${backup_pg_database_path}; both branches must target the role
# repository (compare list-snapshot-db below, which uses the database
# path in both branches).
function list-snapshot-roles() {
  if [ -z "${CERTIFICATE_FILE}" ];
  then
    restic -r "${RESTIC_REPOSITORY}/${backup_pg_role_path}" snapshots
  else
    restic -r "${RESTIC_REPOSITORY}/${backup_pg_role_path}" snapshots --cacert "${CERTIFICATE_FILE}"
  fi
}
|
||||
|
||||
function list-snapshot-db() {
|
||||
if [ -z ${CERTIFICATE_FILE} ];
|
||||
then
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots
|
||||
else
|
||||
restic -r ${RESTIC_REPOSITORY}/${backup_pg_database_path} snapshots --cacert ${CERTIFICATE_FILE}
|
||||
fi
|
||||
}
|
@ -0,0 +1,51 @@
|
||||
#! /usr/bin/env bb

(ns restic-management
  (:require
   [clojure.spec.alpha :as s]
   [clojure.java.io :as io]
   [clojure.edn :as edn]))

;; Spec for the persisted backup-repository state.
(s/def ::state string?)

(s/def ::backup-repository-state
  (s/keys :req-un [::state]))

(def state {:state ""})

(defn store-backup-repository-state
  "Persist the given state to the local edn state file."
  [s]
  (spit "backup-repository-state.edn" s))

(defn read-backup-repository-state
  "Read the persisted state; prints a diagnostic and yields nil when the
  file is missing or contains invalid edn."
  []
  (try
    (with-open [r (io/reader "backup-repository-state.edn")]
      (edn/read (java.io.PushbackReader. r)))
    (catch java.io.IOException e
      (printf "Couldn't open '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))
    (catch RuntimeException e
      (printf "Error parsing edn file '%s': %s\n" "backup-repository-state.edn" (.getMessage e)))))

(defn read-secret
  "Read a mounted kubernetes secret below /var/run/secrets/."
  [s]
  (slurp (str "/var/run/secrets/" s)))

(println (read-secret "rotation-credential-secret/rotation-credential"))
(println (read-secret "backup-secrets/restic-password"))

;; Spec for the password-rotation state.
(s/def ::new-password string?)
(s/def ::old-password string?)
(s/def ::password-state
  (s/keys :req-un [::new-password ::old-password]))

(defn rotate
  "Snapshot the current and to-be-rotated credentials into the state file."
  []
  (let [state {:new-password (read-secret "rotation-credential-secret/rotation-credential")
               :old-password (read-secret "backup-secrets/restic-password")}]
    (store-backup-repository-state (prn-str state))))

(rotate)
|
@ -0,0 +1,56 @@
|
||||
from os import environ
from datetime import datetime
from pybuilder.core import task, init
from ddadevops import *

name = "ddadevops"
MODULE = "clj-cljs"
PROJECT_ROOT_PATH = "../.."
version = "4.12.1-dev"


@init
def initialize(project):
    """Configure the ddadevops image build for the clj-cljs image.

    Dev versions get a timestamp suffix so repeated dev pushes produce
    distinct image tags.
    """
    image_tag = version
    if "dev" in image_tag:
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    # Renamed from `input` to avoid shadowing the `input` builtin.
    build_config = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": f"{image_tag}",
    }

    project.build_depends_on("ddadevops>=4.0.0")

    build = DevopsImageBuild(project, build_config)
    build.initialize_build_dir()


@task
def image(project):
    """Build the container image."""
    build = get_devops_build(project)
    build.image()


@task
def drun(project):
    """Run the built image (docker run wrapper)."""
    build = get_devops_build(project)
    build.drun()


@task
def test(project):
    """Run the image test suite."""
    build = get_devops_build(project)
    build.test()


@task
def publish(project):
    """Log in to Docker Hub and push the image."""
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()
@ -0,0 +1,4 @@
|
||||
FROM node:lts-bookworm-slim
|
||||
|
||||
ADD resources /tmp
|
||||
RUN /tmp/install.sh
|
@ -0,0 +1,45 @@
|
||||
#!/bin/bash
|
||||
set -exo pipefail
|
||||
|
||||
function main() {
|
||||
{
|
||||
upgradeSystem
|
||||
|
||||
mkdir -p /usr/share/man/man1
|
||||
apt-get -qqy install curl openjdk-17-jre-headless leiningen
|
||||
|
||||
# shadow-cljs
|
||||
npm install -g npm
|
||||
npm install -g --save-dev shadow-cljs
|
||||
|
||||
# download kubeconform & graalvm
|
||||
kubeconform_version="0.6.4"
|
||||
|
||||
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
|
||||
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
|
||||
|
||||
# checksum kubeconform
|
||||
checksum
|
||||
|
||||
# install kubeconform
|
||||
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
|
||||
|
||||
#install pyb
|
||||
apt-get -qqy install python3 python3-pip git
|
||||
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
|
||||
|
||||
#check
|
||||
lein --help
|
||||
|
||||
cleanupDocker
|
||||
} > /dev/null
|
||||
}
|
||||
|
||||
function checksum() {
|
||||
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
|
||||
cat /tmp/kubeconform-checksum
|
||||
sha256sum -c --status /tmp/kubeconform-checksum
|
||||
}
|
||||
|
||||
source /tmp/install_functions_debian.sh
|
||||
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
|
@ -0,0 +1,6 @@
|
||||
FROM debian:stable-slim
|
||||
|
||||
ADD resources /tmp
|
||||
RUN /tmp/install.sh
|
||||
ENV LANG=en_US.UTF-8 \
|
||||
JAVA_HOME=/usr/lib/jvm/graalvm
|
@ -0,0 +1,57 @@
|
||||
#!/bin/bash
|
||||
set -exo pipefail
|
||||
|
||||
function main() {
|
||||
{
|
||||
upgradeSystem
|
||||
|
||||
apt-get -qqy install curl git openjdk-17-jre-headless leiningen build-essential libz-dev zlib1g-dev
|
||||
|
||||
|
||||
# download kubeconform & graalvm
|
||||
kubeconform_version="0.6.4"
|
||||
graalvm_jdk_version="21.0.2"
|
||||
|
||||
curl -SsLo /tmp/kubeconform-linux-amd64.tar.gz https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/kubeconform-linux-amd64.tar.gz
|
||||
curl -SsLo /tmp/CHECKSUMS https://github.com/yannh/kubeconform/releases/download/v${kubeconform_version}/CHECKSUMS
|
||||
curl -SsLo /tmp/graalvm-community-jdk.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz
|
||||
curl -SsLo /tmp/graalvm-checksum https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-${graalvm_jdk_version}/graalvm-community-jdk-${graalvm_jdk_version}_linux-x64_bin.tar.gz.sha256
|
||||
|
||||
# checksum kubeconform & graalvm-jdk
|
||||
checksum
|
||||
|
||||
# install kubeconform
|
||||
tar -C /usr/local/bin -xf /tmp/kubeconform-linux-amd64.tar.gz --exclude=LICENSE
|
||||
|
||||
# install graalvm
|
||||
tar -C /usr/lib/jvm/ -xf /tmp/graalvm-community-jdk.tar.gz
|
||||
dirname_graalvm=$(ls /usr/lib/jvm/|grep -e graa)
|
||||
ln -s /usr/lib/jvm/$dirname_graalvm /usr/lib/jvm/graalvm
|
||||
ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
|
||||
update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
|
||||
ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
|
||||
|
||||
#install pyb
|
||||
apt-get -qqy install python3 python3-pip
|
||||
pip3 install pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection --break-system-packages
|
||||
|
||||
#check
|
||||
native-image --version
|
||||
lein -v
|
||||
|
||||
cleanupDocker
|
||||
} > /dev/null
|
||||
}
|
||||
|
||||
function checksum() {
|
||||
#kubeconform
|
||||
awk '{print $1 " /tmp/" $2}' /tmp/CHECKSUMS|sed -n '2p' > /tmp/kubeconform-checksum
|
||||
sha256sum -c --status /tmp/kubeconform-checksum
|
||||
|
||||
#graalvm
|
||||
echo " /tmp/graalvm-community-jdk.tar.gz"|tee -a /tmp/graalvm-checksum
|
||||
sha256sum -c --status /tmp/graalvm-checksum
|
||||
}
|
||||
|
||||
source /tmp/install_functions_debian.sh
|
||||
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
|
@ -1,4 +0,0 @@
|
||||
FROM node:lts-buster-slim
|
||||
|
||||
ADD resources /tmp
|
||||
RUN /tmp/install.sh
|
@ -1,2 +0,0 @@
|
||||
d7a5cb848b783c15119316d716d8a74bf11c9e3ab050f3adf28e0678a6018467 kubeconform-v0.4.7.tar.gz
|
||||
bbd3e03025168172a76c2a29e6a14c1c37e3476b30774259c3ef5952fb86f470 graalvm-ce-java11-linux-amd64-21.2.0.tar.gz
|
@ -1,43 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -eux
|
||||
|
||||
function main() {
|
||||
upgradeSystem
|
||||
|
||||
mkdir -p /usr/share/man/man1
|
||||
apt -qqy install openjdk-11-jre-headless leiningen curl build-essential libz-dev zlib1g-dev
|
||||
|
||||
# shadow-cljs
|
||||
npm install -g --save-dev shadow-cljs
|
||||
|
||||
# download kubeconform & graalvm
|
||||
curl -Lo /tmp/kubeconform-v0.4.7.tar.gz https://github.com/yannh/kubeconform/releases/download/v0.4.7/kubeconform-linux-amd64.tar.gz
|
||||
curl -Lo /tmp/graalvm-ce-java11-linux-amd64-21.2.0.tar.gz https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-21.2.0/graalvm-ce-java11-linux-amd64-21.2.0.tar.gz
|
||||
|
||||
# checksum
|
||||
cd /tmp
|
||||
sha256sum --check CHECKSUMS
|
||||
|
||||
# install kubeconform
|
||||
tar -xf /tmp/kubeconform-v0.4.7.tar.gz
|
||||
cp kubeconform /usr/local/bin
|
||||
|
||||
# install graalvm
|
||||
tar -xzf graalvm-ce-java11-linux-amd64-21.2.0.tar.gz
|
||||
mv graalvm-ce-java11-21.2.0 /usr/lib/jvm/
|
||||
ln -s /usr/lib/jvm/graalvm-ce-java11-21.2.0 /usr/lib/jvm/graalvm
|
||||
ln -s /usr/lib/jvm/graalvm/bin/gu /usr/local/bin
|
||||
|
||||
update-alternatives --install /usr/bin/java java /usr/lib/jvm/graalvm/bin/java 2
|
||||
|
||||
gu install native-image
|
||||
ln -s /usr/lib/jvm/graalvm/bin/native-image /usr/local/bin
|
||||
|
||||
#install lein
|
||||
/tmp/lein.sh
|
||||
|
||||
cleanupDocker
|
||||
}
|
||||
|
||||
source /tmp/install_functions.sh
|
||||
main
|
@ -1,423 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Ensure this file is executable via `chmod a+x lein`, then place it
|
||||
# somewhere on your $PATH, like ~/bin. The rest of Leiningen will be
|
||||
# installed upon first run into the ~/.lein/self-installs directory.
|
||||
|
||||
# Print all arguments to stderr, keeping stdout clean for command output.
function msg {
    echo "$@" 1>&2
}
|
||||
|
||||
export LEIN_VERSION="2.9.6"
|
||||
# Must be sha256sum, will be replaced by bin/release
|
||||
export LEIN_CHECKSUM='41c543f73eec4327dc20e60d5d820fc2a9dc772bc671610b9c385d9c4f5970b8'
|
||||
|
||||
case $LEIN_VERSION in
|
||||
*SNAPSHOT) SNAPSHOT="YES" ;;
|
||||
*) SNAPSHOT="NO" ;;
|
||||
esac
|
||||
|
||||
if [[ "$CLASSPATH" != "" ]]; then
|
||||
cat <<-'EOS' 1>&2
|
||||
WARNING: You have $CLASSPATH set, probably by accident.
|
||||
It is strongly recommended to unset this before proceeding.
|
||||
EOS
|
||||
fi
|
||||
|
||||
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]]; then
|
||||
delimiter=";"
|
||||
else
|
||||
delimiter=":"
|
||||
fi
|
||||
|
||||
if [[ "$OSTYPE" == "cygwin" ]]; then
|
||||
cygwin=true
|
||||
else
|
||||
cygwin=false
|
||||
fi
|
||||
|
||||
# Abort with exit code 1, naming the required executable missing from $PATH.
function command_not_found {
    msg "Leiningen couldn't find $1 in your \$PATH ($PATH), which is required."
    exit 1
}
|
||||
|
||||
# Echo $1 converted to a platform-native path (no trailing newline).
function make_native_path {
    # ensure we have native paths
    if $cygwin && [[ "$1" == /* ]]; then
        # cygwin: translate a POSIX path to a Windows path
        echo -n "$(cygpath -wp "$1")"
    elif [[ "$OSTYPE" == "msys" && "$1" == /?/* ]]; then
        # msys: map /c/... style paths to c:/... (pwd -W when the dir exists)
        echo -n "$(sh -c "(cd $1 2</dev/null && pwd -W) || echo $1 | sed 's/^\\/\([a-z]\)/\\1:/g'")"
    else
        echo -n "$1"
    fi
}
|
||||
|
||||
# usage : add_path PATH_VAR [PATH]...
# Append each PATH (converted via make_native_path) to the variable named
# PATH_VAR, joining entries with the platform-specific $delimiter.
function add_path {
    local path_var="$1"
    shift
    while [ -n "$1" ];do
        # http://bashify.com/?Useful_Techniques:Indirect_Variables:Indirect_Assignment
        if [[ -z ${!path_var} ]]; then
            export ${path_var}="$(make_native_path "$1")"
        else
            export ${path_var}="${!path_var}${delimiter}$(make_native_path "$1")"
        fi
        shift
    done
}
|
||||
|
||||
# Print a diagnostic for a failed download of URL $1 (exit code $2) to stderr.
function download_failed_message {
    cat <<-EOS 1>&2
Failed to download $1 (exit code $2)
It's possible your HTTP client's certificate store does not have the
correct certificate authority needed. This is often caused by an
out-of-date version of libssl. It's also possible that you're behind a
firewall and haven't set HTTP_PROXY and HTTPS_PROXY.
EOS
}
|
||||
|
||||
# Print a checksum-mismatch diagnostic to stderr.
# $1 = URL, $2 = expected sha256, $3 = actual sha256.
function checksum_failed_message {
    cat <<-EOS 1>&2
Failed to properly download $1
The checksum was mismatched. and we could not verify the downloaded
file. We expected a sha256 of
$2 and actually had
$3.
We used '$SHASUM_CMD' to verify the downloaded file.
EOS
}
|
||||
|
||||
# Download the leiningen standalone jar into $LEIN_JAR and verify its
# sha256 against $LEIN_CHECKSUM. Refuses to overwrite an existing jar;
# exits non-zero on download or checksum failure.
function self_install {
    if [ -r "$LEIN_JAR" ]; then
        cat <<-EOS 1>&2
The self-install jar already exists at $LEIN_JAR.
If you wish to re-download, delete it and rerun "$0 self-install".
EOS
        exit 1
    fi
    msg "Downloading Leiningen to $LEIN_JAR now..."
    mkdir -p "$(dirname "$LEIN_JAR")"
    LEIN_URL="https://github.com/technomancy/leiningen/releases/download/$LEIN_VERSION/leiningen-$LEIN_VERSION-standalone.zip"
    # download to a .pending file so a partial download never looks installed
    $HTTP_CLIENT "$LEIN_JAR.pending" "$LEIN_URL"
    local exit_code=$?
    if [ $exit_code == 0 ]; then
        # verify the download before moving it into place
        printf "$LEIN_CHECKSUM $LEIN_JAR.pending\n" > "$LEIN_JAR.pending.shasum"
        $SHASUM_CMD -c "$LEIN_JAR.pending.shasum"
        if [ $? == 0 ]; then
            mv -f "$LEIN_JAR.pending" "$LEIN_JAR"
        else
            got_sum="$($SHASUM_CMD "$LEIN_JAR.pending" | cut -f 1 -d ' ')"
            checksum_failed_message "$LEIN_URL" "$LEIN_CHECKSUM" "$got_sum"
            rm "$LEIN_JAR.pending" 2> /dev/null
            exit 1
        fi
    else
        rm "$LEIN_JAR.pending" 2> /dev/null
        download_failed_message "$LEIN_URL" "$exit_code"
        exit 1
    fi
}
|
||||
|
||||
NOT_FOUND=1
|
||||
ORIGINAL_PWD="$PWD"
|
||||
while [ ! -r "$PWD/project.clj" ] && [ "$PWD" != "/" ] && [ $NOT_FOUND -ne 0 ]
|
||||
do
|
||||
cd ..
|
||||
if [ "$(dirname "$PWD")" = "/" ]; then
|
||||
NOT_FOUND=0
|
||||
cd "$ORIGINAL_PWD"
|
||||
fi
|
||||
done
|
||||
|
||||
export LEIN_HOME="${LEIN_HOME:-"$HOME/.lein"}"
|
||||
|
||||
for f in "/etc/leinrc" "$LEIN_HOME/leinrc" ".leinrc"; do
|
||||
if [ -e "$f" ]; then
|
||||
source "$f"
|
||||
fi
|
||||
done
|
||||
|
||||
if $cygwin; then
|
||||
export LEIN_HOME=$(cygpath -w "$LEIN_HOME")
|
||||
fi
|
||||
|
||||
LEIN_JAR="$LEIN_HOME/self-installs/leiningen-$LEIN_VERSION-standalone.jar"
|
||||
|
||||
# normalize $0 on certain BSDs
|
||||
if [ "$(dirname "$0")" = "." ]; then
|
||||
SCRIPT="$(which "$(basename "$0")")"
|
||||
if [ -z "$SCRIPT" ]; then
|
||||
SCRIPT="$0"
|
||||
fi
|
||||
else
|
||||
SCRIPT="$0"
|
||||
fi
|
||||
|
||||
# resolve symlinks to the script itself portably
|
||||
while [ -h "$SCRIPT" ] ; do
|
||||
ls=$(ls -ld "$SCRIPT")
|
||||
link=$(expr "$ls" : '.*-> \(.*\)$')
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
SCRIPT="$link"
|
||||
else
|
||||
SCRIPT="$(dirname "$SCRIPT"$)/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
BIN_DIR="$(dirname "$SCRIPT")"
|
||||
|
||||
export LEIN_JVM_OPTS="${LEIN_JVM_OPTS-"-Xverify:none -XX:+TieredCompilation -XX:TieredStopAtLevel=1"}"
|
||||
|
||||
# This needs to be defined before we call HTTP_CLIENT below
|
||||
if [ "$HTTP_CLIENT" = "" ]; then
|
||||
if type -p curl >/dev/null 2>&1; then
|
||||
if [ "$https_proxy" != "" ]; then
|
||||
CURL_PROXY="-x $https_proxy"
|
||||
fi
|
||||
HTTP_CLIENT="curl $CURL_PROXY -f -L -o"
|
||||
else
|
||||
HTTP_CLIENT="wget -O"
|
||||
fi
|
||||
fi
|
||||
|
||||
# This needs to be defined before we call SHASUM_CMD below
|
||||
if [ "$SHASUM_CMD" = "" ]; then
|
||||
if type -p sha256sum >/dev/null 2>&1; then
|
||||
export SHASUM_CMD="sha256sum"
|
||||
elif type -p shasum >/dev/null 2>&1; then
|
||||
export SHASUM_CMD="shasum --algorithm 256"
|
||||
elif type -p sha256 >/dev/null 2>&1; then
|
||||
export SHASUM_CMD="sha256 -q"
|
||||
else
|
||||
command_not_found sha256sum
|
||||
fi
|
||||
fi
|
||||
|
||||
# When :eval-in :classloader we need more memory
|
||||
grep -E -q '^\s*:eval-in\s+:classloader\s*$' project.clj 2> /dev/null && \
|
||||
export LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Xms64m -Xmx512m"
|
||||
|
||||
if [ -r "$BIN_DIR/../src/leiningen/version.clj" ]; then
|
||||
# Running from source checkout
|
||||
LEIN_DIR="$(cd $(dirname "$BIN_DIR");pwd -P)"
|
||||
|
||||
# Need to use lein release to bootstrap the leiningen-core library (for aether)
|
||||
if [ ! -r "$LEIN_DIR/leiningen-core/.lein-bootstrap" ]; then
|
||||
cat <<-'EOS' 1>&2
|
||||
Leiningen is missing its dependencies.
|
||||
Please run "lein bootstrap" in the leiningen-core/ directory
|
||||
with a stable release of Leiningen. See CONTRIBUTING.md for details.
|
||||
EOS
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If project.clj for lein or leiningen-core changes, we must recalculate
|
||||
LAST_PROJECT_CHECKSUM=$(cat "$LEIN_DIR/.lein-project-checksum" 2> /dev/null)
|
||||
PROJECT_CHECKSUM=$(sum "$LEIN_DIR/project.clj" "$LEIN_DIR/leiningen-core/project.clj")
|
||||
if [ "$PROJECT_CHECKSUM" != "$LAST_PROJECT_CHECKSUM" ]; then
|
||||
if [ -r "$LEIN_DIR/.lein-classpath" ]; then
|
||||
rm "$LEIN_DIR/.lein-classpath"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Use bin/lein to calculate its own classpath.
|
||||
if [ ! -r "$LEIN_DIR/.lein-classpath" ] && [ "$1" != "classpath" ]; then
|
||||
msg "Recalculating Leiningen's classpath."
|
||||
cd "$LEIN_DIR"
|
||||
|
||||
LEIN_NO_USER_PROFILES=1 "$LEIN_DIR/bin/lein" classpath .lein-classpath
|
||||
sum "$LEIN_DIR/project.clj" "$LEIN_DIR/leiningen-core/project.clj" > \
|
||||
.lein-project-checksum
|
||||
cd -
|
||||
fi
|
||||
|
||||
mkdir -p "$LEIN_DIR/target/classes"
|
||||
export LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Dclojure.compile.path=$LEIN_DIR/target/classes"
|
||||
add_path CLASSPATH "$LEIN_DIR/leiningen-core/src/" "$LEIN_DIR/leiningen-core/resources/" \
|
||||
"$LEIN_DIR/test:$LEIN_DIR/target/classes" "$LEIN_DIR/src" ":$LEIN_DIR/resources"
|
||||
|
||||
if [ -r "$LEIN_DIR/.lein-classpath" ]; then
|
||||
add_path CLASSPATH "$(cat "$LEIN_DIR/.lein-classpath" 2> /dev/null)"
|
||||
else
|
||||
add_path CLASSPATH "$(cat "$LEIN_DIR/leiningen-core/.lein-bootstrap" 2> /dev/null)"
|
||||
fi
|
||||
else # Not running from a checkout
|
||||
add_path CLASSPATH "$LEIN_JAR"
|
||||
|
||||
if [ "$LEIN_USE_BOOTCLASSPATH" != "no" ]; then
|
||||
LEIN_JVM_OPTS="-Xbootclasspath/a:$LEIN_JAR $LEIN_JVM_OPTS"
|
||||
fi
|
||||
|
||||
if [ ! -r "$LEIN_JAR" -a "$1" != "self-install" ]; then
|
||||
self_install
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVA_CMD" ] && ! type -f java >/dev/null
|
||||
then
|
||||
msg "Leiningen couldn't find 'java' executable, which is required."
|
||||
msg "Please either set JAVA_CMD or put java (>=1.6) in your \$PATH ($PATH)."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export LEIN_JAVA_CMD="${LEIN_JAVA_CMD:-${JAVA_CMD:-java}}"
|
||||
|
||||
if [[ -z "${DRIP_INIT+x}" && "$(basename "$LEIN_JAVA_CMD")" == *drip* ]]; then
|
||||
export DRIP_INIT="$(printf -- '-e\n(require (quote leiningen.repl))')"
|
||||
export DRIP_INIT_CLASS="clojure.main"
|
||||
fi
|
||||
|
||||
# Support $JAVA_OPTS for backwards-compatibility.
|
||||
export JVM_OPTS="${JVM_OPTS:-"$JAVA_OPTS"}"
|
||||
|
||||
# Handle jline issue with cygwin not propagating OSTYPE through java subprocesses: https://github.com/jline/jline2/issues/62
|
||||
cygterm=false
|
||||
if $cygwin; then
|
||||
case "$TERM" in
|
||||
rxvt* | xterm* | vt*) cygterm=true ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
if $cygterm; then
|
||||
LEIN_JVM_OPTS="$LEIN_JVM_OPTS -Djline.terminal=jline.UnixTerminal"
|
||||
stty -icanon min 1 -echo > /dev/null 2>&1
|
||||
fi
|
||||
|
||||
# TODO: investigate http://skife.org/java/unix/2011/06/20/really_executable_jars.html
|
||||
# If you're packaging this for a package manager (.deb, homebrew, etc)
|
||||
# you need to remove the self-install and upgrade functionality or see lein-pkg.
|
||||
if [ "$1" = "self-install" ]; then
|
||||
if [ -r "$BIN_DIR/../src/leiningen/version.clj" ]; then
|
||||
cat <<-'EOS' 1>&2
|
||||
Running self-install from a checkout is not supported.
|
||||
See CONTRIBUTING.md for SNAPSHOT-specific build instructions.
|
||||
EOS
|
||||
exit 1
|
||||
fi
|
||||
msg "Manual self-install is deprecated; it will run automatically when necessary."
|
||||
self_install
|
||||
elif [ "$1" = "upgrade" ] || [ "$1" = "downgrade" ]; then
|
||||
if [ "$LEIN_DIR" != "" ]; then
|
||||
msg "The upgrade task is not meant to be run from a checkout."
|
||||
exit 1
|
||||
fi
|
||||
if [ $SNAPSHOT = "YES" ]; then
|
||||
cat <<-'EOS' 1>&2
|
||||
The upgrade task is only meant for stable releases.
|
||||
See the "Bootstrapping" section of CONTRIBUTING.md.
|
||||
EOS
|
||||
exit 1
|
||||
fi
|
||||
if [ ! -w "$SCRIPT" ]; then
|
||||
msg "You do not have permission to upgrade the installation in $SCRIPT"
|
||||
exit 1
|
||||
else
|
||||
TARGET_VERSION="${2:-stable}"
|
||||
echo "The script at $SCRIPT will be upgraded to the latest $TARGET_VERSION version."
|
||||
echo -n "Do you want to continue [Y/n]? "
|
||||
read RESP
|
||||
case "$RESP" in
|
||||
y|Y|"")
|
||||
echo
|
||||
msg "Upgrading..."
|
||||
TARGET="/tmp/lein-${$}-upgrade"
|
||||
if $cygwin; then
|
||||
TARGET=$(cygpath -w "$TARGET")
|
||||
fi
|
||||
LEIN_SCRIPT_URL="https://github.com/technomancy/leiningen/raw/$TARGET_VERSION/bin/lein"
|
||||
$HTTP_CLIENT "$TARGET" "$LEIN_SCRIPT_URL"
|
||||
if [ $? == 0 ]; then
|
||||
cmp -s "$TARGET" "$SCRIPT"
|
||||
if [ $? == 0 ]; then
|
||||
msg "Leiningen is already up-to-date."
|
||||
fi
|
||||
mv "$TARGET" "$SCRIPT" && chmod +x "$SCRIPT"
|
||||
unset CLASSPATH
|
||||
exec "$SCRIPT" version
|
||||
else
|
||||
download_failed_message "$LEIN_SCRIPT_URL"
|
||||
fi;;
|
||||
*)
|
||||
msg "Aborted."
|
||||
exit 1;;
|
||||
esac
|
||||
fi
|
||||
else
|
||||
if $cygwin; then
|
||||
# When running on Cygwin, use Windows-style paths for java
|
||||
ORIGINAL_PWD=$(cygpath -w "$ORIGINAL_PWD")
|
||||
fi
|
||||
|
||||
# apply context specific CLASSPATH entries
|
||||
if [ -f .lein-classpath ]; then
|
||||
add_path CLASSPATH "$(cat .lein-classpath)"
|
||||
fi
|
||||
|
||||
if [ -n "$DEBUG" ]; then
|
||||
msg "Leiningen's classpath: $CLASSPATH"
|
||||
fi
|
||||
|
||||
if [ -r .lein-fast-trampoline ]; then
|
||||
export LEIN_FAST_TRAMPOLINE='y'
|
||||
fi
|
||||
|
||||
if [ "$LEIN_FAST_TRAMPOLINE" != "" ] && [ -r project.clj ]; then
|
||||
INPUTS="$* $(cat project.clj) $LEIN_VERSION $(test -f "$LEIN_HOME/profiles.clj" && cat "$LEIN_HOME/profiles.clj") $(test -f profiles.clj && cat profiles.clj)"
|
||||
|
||||
INPUT_CHECKSUM=$(echo "$INPUTS" | $SHASUM_CMD | cut -f 1 -d " ")
|
||||
# Just don't change :target-path in project.clj, mkay?
|
||||
TRAMPOLINE_FILE="target/trampolines/$INPUT_CHECKSUM"
|
||||
else
|
||||
if hash mktemp 2>/dev/null; then
|
||||
# Check if mktemp is available before using it
|
||||
TRAMPOLINE_FILE="$(mktemp /tmp/lein-trampoline-XXXXXXXXXXXXX)"
|
||||
else
|
||||
TRAMPOLINE_FILE="/tmp/lein-trampoline-$$"
|
||||
fi
|
||||
trap 'rm -f $TRAMPOLINE_FILE' EXIT
|
||||
fi
|
||||
|
||||
if $cygwin; then
|
||||
TRAMPOLINE_FILE=$(cygpath -w "$TRAMPOLINE_FILE")
|
||||
fi
|
||||
|
||||
if [ "$INPUT_CHECKSUM" != "" ] && [ -r "$TRAMPOLINE_FILE" ]; then
|
||||
if [ -n "$DEBUG" ]; then
|
||||
msg "Fast trampoline with $TRAMPOLINE_FILE."
|
||||
fi
|
||||
exec sh -c "exec $(cat "$TRAMPOLINE_FILE")"
|
||||
else
|
||||
export TRAMPOLINE_FILE
|
||||
"$LEIN_JAVA_CMD" \
|
||||
-Dfile.encoding=UTF-8 \
|
||||
-Dmaven.wagon.http.ssl.easy=false \
|
||||
-Dmaven.wagon.rto=10000 \
|
||||
$LEIN_JVM_OPTS \
|
||||
-Dleiningen.input-checksum="$INPUT_CHECKSUM" \
|
||||
-Dleiningen.original.pwd="$ORIGINAL_PWD" \
|
||||
-Dleiningen.script="$SCRIPT" \
|
||||
-classpath "$CLASSPATH" \
|
||||
clojure.main -m leiningen.core.main "$@"
|
||||
|
||||
EXIT_CODE=$?
|
||||
|
||||
if $cygterm ; then
|
||||
stty icanon echo > /dev/null 2>&1
|
||||
fi
|
||||
|
||||
if [ -r "$TRAMPOLINE_FILE" ] && [ "$LEIN_TRAMPOLINE_WARMUP" = "" ]; then
|
||||
TRAMPOLINE="$(cat "$TRAMPOLINE_FILE")"
|
||||
if [ "$INPUT_CHECKSUM" = "" ]; then # not using fast trampoline
|
||||
rm "$TRAMPOLINE_FILE"
|
||||
fi
|
||||
if [ "$TRAMPOLINE" = "" ]; then
|
||||
exit $EXIT_CODE
|
||||
else
|
||||
exec sh -c "exec $TRAMPOLINE"
|
||||
fi
|
||||
else
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
fi
|
||||
fi
|
@ -1,11 +0,0 @@
|
||||
# Verify the clojure image contents with dda-serverspec.
FROM clojure

# Run update and install in ONE layer: a cached `apt update` layer can
# otherwise serve a stale package index to the install step. Also drop
# the redundant `--yes` (duplicate of -y in -yqq).
RUN apt update && \
    apt -yqq --no-install-recommends install curl default-jre-headless

RUN curl -L -o /tmp/serverspec.jar \
  https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/2.0.0/dda-serverspec-standalone.jar

COPY serverspec.edn /tmp/serverspec.edn

# -v: verbose verification of the expectations in serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v
|
@ -1 +0,0 @@
|
||||
{}
|
@ -0,0 +1,57 @@
|
||||
from os import environ
|
||||
from datetime import datetime
|
||||
from pybuilder.core import task, init
|
||||
from ddadevops import *
|
||||
|
||||
name = "ddadevops"
|
||||
MODULE = "ddadevops"
|
||||
PROJECT_ROOT_PATH = "../.."
|
||||
version = "4.12.1-dev"
|
||||
|
||||
|
||||
@init
def initialize(project):
    """Configure the pybuilder project for building the ddadevops image.

    Dev versions get a timestamp suffix so every dev build produces a
    unique image tag.
    """
    image_tag = version
    if "dev" in image_tag:
        # NOTE(review): no separator between version and timestamp — yields
        # tags like "4.12.1-dev2024-01-01-12-00-00"; confirm this is intended.
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    # Renamed from `input`, which shadowed the builtin input().
    config = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_ONLY",
        # image_tag is already a str; the f-string wrapper was redundant
        "image_tag": image_tag,
    }

    project.build_depends_on("ddadevops>=4.9.0")

    build = DevopsImageBuild(project, config)
    build.initialize_build_dir()
|
||||
|
||||
|
||||
@task
def image(project):
    """Build the container image for this module."""
    get_devops_build(project).image()
|
||||
|
||||
|
||||
@task
def drun(project):
    """Run the built container image interactively for inspection."""
    get_devops_build(project).drun()
|
||||
|
||||
|
||||
@task
def test(project):
    """Execute the image's test suite."""
    get_devops_build(project).test()
|
||||
|
||||
|
||||
@task
def publish(project):
    """Log in to Docker Hub and push the built image."""
    devops_build = get_devops_build(project)
    devops_build.dockerhub_login()
    devops_build.dockerhub_publish()
|
@ -0,0 +1,5 @@
|
||||
# ddadevops base image: python 3.10 on alpine plus the build tooling
# installed by resources/install.sh.
FROM python:3.10-alpine

# resources/ contains install.sh and the shared install functions
ADD resources /tmp

RUN /tmp/install.sh
|
@ -0,0 +1,19 @@
|
||||
#!/bin/sh

# NOTE(review): `set -o pipefail` is not strictly POSIX but is supported by
# busybox ash on alpine — confirm if this script must run on other shells.
set -exo pipefail

# Provision the ddadevops image: python toolchain plus the
# pybuilder/ddadevops dependencies. All output is silenced.
# Fix: POSIX function syntax — the `function` keyword is a bashism
# and this script runs under /bin/sh.
main() {
  {
    upgradeSystem

    apk add --no-cache python3 py3-pip openssl-dev bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection

    cleanupDocker
  } > /dev/null
}

# upgradeSystem/cleanupDocker come from install_functions_alpine.sh;
# `.` is the POSIX spelling of bash's `source`.
. /tmp/install_functions_alpine.sh
main
|
@ -1,6 +0,0 @@
|
||||
FROM docker:latest
|
||||
|
||||
RUN set -eux;
|
||||
RUN apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git;
|
||||
RUN python3 -m pip install -U pip;
|
||||
RUN pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml;
|
@ -1,11 +0,0 @@
|
||||
# Verify the devops-build image contents with dda-serverspec.
FROM devops-build

RUN apk update
RUN apk add curl openjdk8

RUN curl -L -o /tmp/serverspec.jar \
  https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/2.0.0/dda-serverspec-standalone.jar

COPY serverspec.edn /tmp/serverspec.edn

# -v: verbose verification of the expectations in serverspec.edn
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v
|
@ -1 +0,0 @@
|
||||
{}
|
@ -0,0 +1,57 @@
|
||||
from os import environ
|
||||
from datetime import datetime
|
||||
from pybuilder.core import task, init
|
||||
from ddadevops import *
|
||||
|
||||
name = "ddadevops"
|
||||
MODULE = "dind"
|
||||
PROJECT_ROOT_PATH = "../.."
|
||||
version = "4.12.1-dev"
|
||||
|
||||
|
||||
@init
def initialize(project):
    """Configure the pybuilder project for building the dind image.

    Dev versions get a timestamp suffix so every dev build produces a
    unique image tag.
    """
    image_tag = version
    if "dev" in image_tag:
        # NOTE(review): no separator between version and timestamp — yields
        # tags like "4.12.1-dev2024-01-01-12-00-00"; confirm this is intended.
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    # Renamed from `input`, which shadowed the builtin input().
    config = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        # image_tag is already a str; the f-string wrapper was redundant
        "image_tag": image_tag,
    }

    project.build_depends_on("ddadevops>=4.7.0")

    build = DevopsImageBuild(project, config)
    build.initialize_build_dir()
|
||||
|
||||
|
||||
@task
def image(project):
    """Build the container image for this module."""
    get_devops_build(project).image()
|
||||
|
||||
|
||||
@task
def drun(project):
    """Run the built container image interactively for inspection."""
    get_devops_build(project).drun()
|
||||
|
||||
|
||||
@task
def test(project):
    """Execute the image's test suite."""
    get_devops_build(project).test()
|
||||
|
||||
|
||||
@task
def publish(project):
    """Log in to Docker Hub and push the built image."""
    devops_build = get_devops_build(project)
    devops_build.dockerhub_login()
    devops_build.dockerhub_publish()
|
@ -0,0 +1,5 @@
|
||||
# dind (docker-in-docker) build image with ddadevops tooling.
FROM docker:latest

WORKDIR /tmp
# resources/ contains install.sh and the shared install functions
ADD resources ./
RUN ./install.sh
|
@ -0,0 +1,17 @@
|
||||
#!/bin/sh

# NOTE(review): `set -o pipefail` is not strictly POSIX but is supported by
# busybox ash on alpine — confirm if this script must run on other shells.
set -exo pipefail

# Provision the dind image with the python-based ddadevops tooling.
# Fix: POSIX function syntax — the `function` keyword is a bashism
# and this script runs under /bin/sh.
main() {
  {
    upgradeSystem

    apk add --no-cache python3 py3-pip openssl-dev bash git
    # --break-system-packages: image-level install; the PEP 668 guard
    # is not wanted inside a throwaway container
    pip3 install --break-system-packages pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection

    cleanupDocker
  } > /dev/null
}

# upgradeSystem/cleanupDocker come from install_functions_alpine.sh;
# `.` is the POSIX spelling of bash's `source`.
. /tmp/install_functions_alpine.sh
main
|
@ -0,0 +1,57 @@
|
||||
from os import environ
|
||||
from datetime import datetime
|
||||
from pybuilder.core import task, init
|
||||
from ddadevops import *
|
||||
|
||||
name = "ddadevops"
|
||||
MODULE = "kotlin"
|
||||
PROJECT_ROOT_PATH = "../.."
|
||||
version = "4.12.1-dev"
|
||||
|
||||
|
||||
@init
def initialize(project):
    """Configure the pybuilder project for building the kotlin image.

    Dev versions get a timestamp suffix so every dev build produces a
    unique image tag.
    """
    image_tag = version
    if "dev" in image_tag:
        # NOTE(review): no separator between version and timestamp — yields
        # tags like "4.12.1-dev2024-01-01-12-00-00"; confirm this is intended.
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    # Renamed from `input`, which shadowed the builtin input().
    config = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        # image_tag is already a str; the f-string wrapper was redundant
        "image_tag": image_tag,
    }

    project.build_depends_on("ddadevops>=4.0.0")

    build = DevopsImageBuild(project, config)
    build.initialize_build_dir()
|
||||
|
||||
|
||||
@task
def image(project):
    """Build the container image for this module."""
    get_devops_build(project).image()
|
||||
|
||||
|
||||
@task
def drun(project):
    """Run the built container image interactively for inspection."""
    get_devops_build(project).drun()
|
||||
|
||||
|
||||
@task
def test(project):
    """Execute the image's test suite."""
    get_devops_build(project).test()
|
||||
|
||||
|
||||
@task
def publish(project):
    """Log in to Docker Hub and push the built image."""
    devops_build = get_devops_build(project)
    devops_build.dockerhub_login()
    devops_build.dockerhub_publish()
|
@ -0,0 +1,4 @@
|
||||
# kotlin build image based on debian stable.
FROM debian:stable-slim

# resources/ contains install.sh and the shared install functions
ADD resources /tmp
RUN /tmp/install.sh
|
@ -0,0 +1,17 @@
|
||||
#!/bin/bash
# Trace commands, fail fast, and fail on pipeline errors.
set -exo pipefail

# Provision the kotlin build image: JVM/kotlin/gradle toolchain plus the
# python-based ddadevops build tooling. All output is silenced.
function main() {
  {
    upgradeSystem

    apt-get -qqy install curl git kotlin gradle iputils-ping ssh python3 python3-pip

    # --break-system-packages: image-level install; the PEP 668 guard
    # is not wanted inside a throwaway container
    pip3 install --break-system-packages pybuilder 'ddadevops>=4.7.0' deprecation dda-python-terraform boto3 pyyaml inflection

    cleanupDocker
  } > /dev/null
}

# upgradeSystem/cleanupDocker come from install_functions_debian.sh.
source /tmp/install_functions_debian.sh
# The env assignments apply for the duration of the main invocation.
DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes main
|
@ -0,0 +1,4 @@
|
||||
# python build image with test/lint tooling preinstalled by install.sh.
FROM python:3.10-alpine

# resources/ contains install.sh and the shared install functions
ADD resources /tmp
RUN /tmp/install.sh
|
@ -0,0 +1,20 @@
|
||||
#!/bin/sh

# NOTE(review): `set -o pipefail` is not strictly POSIX but is supported by
# busybox ash on alpine — confirm if this script must run on other shells.
set -exo pipefail

# Provision the python build image: native build deps plus the pybuilder/
# ddadevops toolchain and the test/lint tooling. All output is silenced.
# Fix: POSIX function syntax — the `function` keyword is a bashism
# and this script runs under /bin/sh.
main() {
  {
    upgradeSystem

    apk add --no-cache build-base rust python3 python3-dev py3-pip py3-setuptools py3-wheel libffi-dev openssl-dev cargo bash git curl
    python3 -m pip install -U pip
    pip3 install pybuilder ddadevops deprecation dda-python-terraform boto3 pyyaml inflection \
      coverage flake8 flake8-polyfill mypy mypy-extensions pycodestyle pyflakes pylint pytest pytest-cov pytest-datafiles types-setuptools types-PyYAML

    cleanupDocker
  } > /dev/null
}

# upgradeSystem/cleanupDocker come from install_functions_alpine.sh;
# `.` is the POSIX spelling of bash's `source`.
. /tmp/install_functions_alpine.sh
main
|
@ -0,0 +1,46 @@
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from .common import (
|
||||
Validateable,
|
||||
)
|
||||
|
||||
|
||||
class ArtifactType(Enum):
    """Kinds of release artifacts the build distinguishes."""

    TEXT = 0
    JAR = 1
|
||||
|
||||
|
||||
class Artifact(Validateable):
|
||||
def __init__(self, path: str):
|
||||
self.path_str = path
|
||||
|
||||
def path(self) -> Path:
|
||||
return Path(self.path_str)
|
||||
|
||||
def type(self) -> str:
|
||||
suffix = self.path().suffix
|
||||
match suffix:
|
||||
case ".jar":
|
||||
return "application/x-java-archive"
|
||||
case ".js":
|
||||
return "application/x-javascript"
|
||||
case _:
|
||||
return "text/plain"
|
||||
|
||||
def validate(self):
|
||||
result = []
|
||||
result += self.__validate_is_not_empty__("path_str")
|
||||
try:
|
||||
Path(self.path_str)
|
||||
except Exception as e:
|
||||
result += [f"path was not a valid: {e}"]
|
||||
return result
|
||||
|
||||
def __str__(self):
|
||||
return str(self.path())
|
||||
|
||||
def __eq__(self, other):
|
||||
return other and self.__str__() == other.__str__()
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return self.__str__().__hash__()
|
@ -0,0 +1,21 @@
|
||||
# Upgrade all installed apk packages against a freshly fetched index (-U).
function upgradeSystem() {
  apk -U upgrade
}

# Shrink the image and remove credentials before a layer is committed.
function cleanupDocker() {
  rm -f /root/.ssh/authorized_keys
  rm -f /root/.ssh/authorized_keys2

  # NOTE(review): `apk cache clean` requires an enabled local cache;
  # confirm it does not fail on images installing with --no-cache only.
  apk cache clean

  rm -rf /tmp/*

  find /var/cache -type f -exec rm -rf {} \;
  find /var/log/ -name '*.log' -exec rm -f {} \;
}

# AMI-specific cleanup: also drop the ubuntu user's SSH keys.
function cleanupAmi() {
  rm -f /home/ubuntu/.ssh/authorized_keys
  rm -f /home/ubuntu/.ssh/authorized_keys2
  cleanupDocker
}
|
@ -0,0 +1,25 @@
|
||||
# Refresh the apt index and upgrade all installed packages quietly.
function upgradeSystem() {
  apt-get update
  apt-get -qqy upgrade
}

# Shrink the image and remove credentials before a layer is committed.
function cleanupDocker() {
  rm -f /root/.ssh/authorized_keys
  rm -f /root/.ssh/authorized_keys2

  apt-get clean
  apt-get -qqy autoremove --purge
  apt-get -qqy autoclean
  # apt recreates this directory on the next `apt-get update`
  rm -rf /var/lib/apt/lists/

  rm -rf /tmp/*

  find /var/cache -type f -exec rm -rf {} \;
  find /var/log/ -name '*.log' -exec rm -f {} \;
}

# AMI-specific cleanup: also drop the ubuntu user's SSH keys.
function cleanupAmi() {
  rm -f /home/ubuntu/.ssh/authorized_keys
  rm -f /home/ubuntu/.ssh/authorized_keys2
  cleanupDocker
}
|
@ -0,0 +1,32 @@
|
||||
import pytest
|
||||
from pybuilder.core import Project
|
||||
from pathlib import Path
|
||||
from src.main.python.ddadevops.domain import (
|
||||
Validateable,
|
||||
DnsRecord,
|
||||
Devops,
|
||||
BuildType,
|
||||
MixinType,
|
||||
Artifact,
|
||||
Image,
|
||||
)
|
||||
from .helper import build_devops, devops_config
|
||||
|
||||
|
||||
def test_should_validate_release():
    # A non-empty path validates; a missing path does not.
    valid_artifact = Artifact("x")
    assert valid_artifact.is_valid()

    invalid_artifact = Artifact(None)
    assert not invalid_artifact.is_valid()
|
||||
|
||||
def test_should_calculate_type():
    # The file suffix drives the MIME type; unknown suffixes (including
    # checksum files like x.jar.sha256) fall back to plain text.
    assert Artifact("x.jar").type() == "application/x-java-archive"
    assert Artifact("x.js").type() == "application/x-javascript"
    assert Artifact("x.jar.sha256").type() == "text/plain"
|
||||
|
Loading…
Reference in New Issue