Compare commits

163 commits (SHA1 only):

7c4cd23e56, 3fd4a72d3d, 0fb8218dbf, 3c92a8b2e6, be63bfde3e, a3d97a0d04, c1c25a016f, 0abb2370e4,
5af484bcce, 722ba5f716, d88670503b, bc27522fb5, 441bc497d1, ddb8e4160d, 71c544bad8, 1a7955b738,
9c9d7dc14b, f94f3a7c43, 026e5f260a, 35b52fdbe8, d44570e9bb, 5e204617d0, 7d50eec689, b509685c62,
30daf13244, fd431b229d, 58e4e9f065, 124300c271, 382c571d42, cb30395d4d, dcbeb0e934, 9dd3664d8c,
ae6bd30923, ffbd97fb8f, d5ad3c7628, 4ba2857db7, 262685efda, 9863c3c115, 5bdfb662bc, 73b8fb0707,
09139ba1b1, e92972db08, 7821fb431a, 2d57a67c63, 816b76070e, 4483f54987, 42b4f18b3f, fd959877a7,
f8f3e41529, e3e5ad775f, 021db11acb, 46b20fe086, 3d2c7ae396, 412ea860f8, 085273a726, f913b1d6b7,
ee097cb230, 5d1ee33790, 607cdaf408, 9c73aaec5e, fbc603d1f9, de7feeee19, 6048bf41d5, e84ebc2bfb,
af20c6da16, f2bb91560a, 9389fd5a24, 437f372fa3, d8351ba873, b3dfcb70e0, f46b890778, 4ab98db4aa,
d6a3e3e722, ed882822e7, 4c17cb661b, adb1fa06a4, 06663390c2, 40a7cba90d, b80c977c79, 80ca26b26a,
f6003d8e2f, e1ae747ebf, edfc33b0bd, 7080f38a93, 785e9a3ce7, 44675d38f5, bcd523ff67, b373f85311,
053f956774, d83132b61a, 09120a943b, ba72a09409, 134786ebf0, 6ea1320bef, 151c5dc92a, 61a76803d4,
ffd9e1de2a, 47cec4d211, fc584c2a48, a33aebce1c, 632b653b2b, bda8b97e59, 6da2d853fc, 3efa8b2f65,
e915a81f7c, 7c4ac21fb3, 89914bbbd8, bcc1321563, 3eaaec37d0, 09fdcf2f11, c0ff5bf65a, 4a76a44c91,
f39b7e237e, a63e9dad24, 76351ba371, ce788acc3f, 6d8dde7991, 0a7162a5ce, 172ab6b509, 7056a853da,
c79e807028, 3a32241cf1, a05fc98e8d, c6d37cbaeb, 35db7917b0, 29daa9c40a, c46560eeed, c000d00504,
70920d828c, 01e3dbb05a, 3ddab331f0, 64e804e7a0, 0228de1a60, 749bec79e4, 57a80d7e71, 05c44c2cdc,
832fc9eb7d, 576197d768, 813d23d759, 99950cb03c, a27362a145, e2b7bd4686, fe0e651607, e9255b118c,
bd6528e68e, 98d221c779, 99c67e5fe5, 4945f4591d, 723ab0b79e, ec826887f5, 9afa4e1d4e, a4b36e1418,
3c9ea8b526, 45860aa39c, 042868bada, 68954d1447, b098b5a1d8, 4191b40cdc, 86182a604f, 3b181bc403,
825fa0e54f, da5e648e3f, 0b2eb3b1be
29 changed files with 1528 additions and 648 deletions
Deleted file (7 lines):

@@ -1,7 +0,0 @@
[bumpversion]
current_version = 0.9.0
commit = True
tag = False

[bumpversion:file:setup.py]
.gitignore (vendored): 33 changes

@@ -1,10 +1,39 @@
+# terraform
 *.tfstate
 *.tfstate.backup
+
+# python
 *.pyc
 *.egg-info
-.idea
 .cache
+__pycache__
 /.pypirc
-/.tox/
+dist/
+build/
+/pytestdebug.log
+.pytestdebug.log
+/pytest_cache
+.lsp
+
+
+# virtualenv
+.virtualenv/
+venv/
+
+
+# Intellij
+.idea
+.idea/
+
+# VSCode
+.vscode/
+pyrightconfig.json
+tmp.txt
+
+# other
 .dropbox
+.DS_Store
+/.tox/
+env/
 Icon
+.clj-kondo
.gitlab-ci.yml (new file, 117 lines)

@@ -0,0 +1,117 @@
image: "python:3.8"

before_script:
  - python --version
  - pip install setuptools wheel twine
  - pip install .
  - pip install -r requirements_dev.txt

stages:
  - lint
  - test
  - build
  - upload

flake8:
  stage: lint
  allow_failure: true
  script:
    - flake8 --max-line-length=120 --count --select=E9,F63,F7,F82 --show-source --statistics dda_python_terraform/*.py
    - flake8 --count --exit-zero --max-complexity=13 --max-line-length=127 --statistics --ignore F401 dda_python_terraform/*.py

mypy:
  stage: lint
  allow_failure: true
  script:
    - python -m mypy dda_python_terraform/terraform.py
    - python -m mypy dda_python_terraform/tfstate.py

pylint:
  stage: lint
  allow_failure: true
  script:
    - pylint -d C0112,C0115,C0301,R0913,R0903,R0902,R0914,R1705,R1732,W0622 dda_python_terraform/*.py


test-0.13.7:
  stage: test
  script:
    - export TFVER=0.13.7
    - export TFURL=https://releases.hashicorp.com/terraform/
    - TFURL+=$TFVER
    - TFURL+="/terraform_"
    - TFURL+=$TFVER
    - TFURL+="_linux_amd64.zip"
    - wget $TFURL -O terraform_bin.zip
    - mkdir tf_bin
    - unzip terraform_bin.zip -d tf_bin
    - PATH=$PATH:$PWD/tf_bin
    - pytest -v

test-1.0.8:
  stage: test
  script:
    - export TFVER=1.0.8
    - export TFURL=https://releases.hashicorp.com/terraform/
    - TFURL+=$TFVER
    - TFURL+="/terraform_"
    - TFURL+=$TFVER
    - TFURL+="_linux_amd64.zip"
    - wget $TFURL -O terraform_bin.zip
    - mkdir tf_bin
    - unzip terraform_bin.zip -d tf_bin
    - PATH=$PATH:$PWD/tf_bin
    - pytest -v

test-1.1.3:
  stage: test
  script:
    - export TFVER=1.1.3
    - export TFURL=https://releases.hashicorp.com/terraform/
    - TFURL+=$TFVER
    - TFURL+="/terraform_"
    - TFURL+=$TFVER
    - TFURL+="_linux_amd64.zip"
    - wget $TFURL -O terraform_bin.zip
    - mkdir tf_bin
    - unzip terraform_bin.zip -d tf_bin
    - PATH=$PATH:$PWD/tf_bin
    - pytest -v

build:
  stage: build
  rules:
    - if: '$CI_COMMIT_TAG =~ /^release-.*$/'
  artifacts:
    paths:
      - dist/*
  script:
    - python setup.py sdist bdist_wheel

pypi:
  stage: upload
  rules:
    - if: '$CI_COMMIT_TAG =~ /^release-.*$/'
  script:
    - twine upload dist/*

gitlab:
  image: registry.gitlab.com/gitlab-org/release-cli:latest
  stage: upload
  rules:
    - if: '$CI_COMMIT_TAG =~ /^release-.*$/'
  artifacts:
    paths:
      - release/*
  before_script:
    - echo "upload to gitlab"
  script:
    - apk --no-cache add curl
    - cp -r dist release
    - cd release
    - rm *.whl
    - find . -type f -exec sha256sum {} \; | sort > sha256sum.lst
    - find . -type f -exec sha512sum {} \; | sort > sha512sum.lst
    - |
      release-cli create --name "Release $CI_COMMIT_TAG" --tag-name $CI_COMMIT_TAG \
      --assets-link "{\"name\":\"release\",\"url\":\"https://gitlab.com/domaindrivenarchitecture/python-terraform/-/jobs/${CI_JOB_ID}/artifacts/file/release\"}" \
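For readability, here is what the step-by-step TFURL concatenation in the test jobs above resolves to. This is an illustrative sketch only (shown for the 1.1.3 job), not part of the changeset:

```python
# Illustrative: the TFURL built piecewise in the CI jobs above, assembled in one go.
# TFVER is whatever the job exports (0.13.7, 1.0.8 or 1.1.3).
TFVER = "1.1.3"
TFURL = f"https://releases.hashicorp.com/terraform/{TFVER}/terraform_{TFVER}_linux_amd64.zip"
print(TFURL)  # the URL the job downloads with wget and unzips into tf_bin/
```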
.pre-commit-config.yaml (new file, 27 lines)

@@ -0,0 +1,27 @@
default_language_version:
  python: python3.6
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0 # v2.1.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-docstring-first
      - id: check-json
      - id: check-merge-conflict
      - id: check-toml
      - id: check-yaml
      - id: debug-statements
      - id: requirements-txt-fixer
  - repo: https://github.com/pycqa/isort
    rev: 5.5.2
    hooks:
      - id: isort
  - repo: https://github.com/lovesegfault/beautysh
    rev: 6.0.1
    hooks:
      - id: beautysh
  - repo: https://github.com/psf/black
    rev: 19.10b0
    hooks:
      - id: black
.travis.yml (deleted file, 52 lines)

@@ -1,52 +0,0 @@
language: python
python:
  - '2.7'
  - '3.5'
  - '3.6'
before_install: sudo apt-get install unzip
before_script:
  - export TFVER=0.10.0
  - export TFURL=https://releases.hashicorp.com/terraform/
  - TFURL+=$TFVER
  - TFURL+="/terraform_"
  - TFURL+=$TFVER
  - TFURL+="_linux_amd64.zip"
  - wget $TFURL -O terraform_bin.zip
  - mkdir tf_bin
  - unzip terraform_bin.zip -d tf_bin
install:
  - curl https://bootstrap.pypa.io/ez_setup.py -o - | python
  - pip install tox-travis
  - pip install .
script:
  - export PATH=$PATH:$PWD/tf_bin
  - tox
branches:
  only:
    - master
    - develop
    - release/**
deploy:
  - provider: pypi
    distributions: sdist
    server: https://testpypi.python.org/pypi
    user: beelit94
    password:
secure: sWxc+p/gdq3k2WbUGNG2F4TukFNkTkvq6OPaiOvyfgWThYNk6/juRkMd8flmTbh0VGhcjFbpDLeSApb2kFhfiokYJSH1hOOcmXf8xzYH8/+R4DDEiGa5Y/pR9TBvYu4S8eJEfFUFfb1BBpapykj7o43hcaqMExBJIdVJU7aeoEAC1jQeTJh8wWwdJKHy2dNSM+6RVhk3e5+b0LfK7Bk5sU5P+YdEMj79MJU450J4OmZXWzJgvBN5/2QfVa5LrUD00nYuGuiBniz2lVevIHWjUYawUzpPsTa7F0s2WemG9YcV7U8u06xNjY9Ce3CTbxNhc7OIKq+TCkOgR3qZFXVJ8A87G+AT2iQ01VslQ4DJCxnJNTnpqojWnwf6MFL9O8ONioWYO32bhQFKOQ806ASHP4lNMRDKqx8hXtP5In7/r0SARKscv6Bas83rp+FESkKD5vWgkZJG+yx96LlwRLUhSVnVyb/nOJ++zt5RR3BvY2O4p9YAZY3Qt8TQihOdBQKnY3UXsMyNaE25+yvyNWpmyJiePRbTUd+cpLnycnqG9Ll8v6TpFXb6ahFMjlAFfJNQYlREfseClTHSRjZNxfsXGQCsJh6TZAq7jOB5hCk3q41eOUFWARxbyj8j59NBV8fSQrrGJJ9/VZKQeYiQlBB9KpK4PrnH84oeQ8i+VSbVr5w=
    on:
      branch: release/**
      tags: false
      condition: $TRAVIS_PYTHON_VERSION = "3.5"
  - provider: pypi
    distributions: sdist
    user: beelit94
    password:
secure: QhCiTLrBvw/Uzt3eiLEmvMP3uHnayVCETqEDA+2+Q9vFavqj0CHA76zqYonBFqnh0a3HFCRIVVt+6ynpZ10kpQ3tAObIw+pY39ZPnpAhOjSpFzzMdpIF9Bhv9A93ng2iSESAZPAOwktHzUwjFx0Zvl0lSYD9rutHgttGgdU2CajiUtwTUhCTjOAVdR2Gm+15H808vzKWnMaKflXxZt+fkt279mQTYAtz6eBWtZwIKry/uAJCSrPSWtbi50O0HsWRMXLXWH5Jn/BVjWSDSM92DssUDq0D+tQyp4M5nQXJ9EyAvEdsKNLx3cvNruznh2ohI2jmcoIjwFiS6+wrEmUiXkP86iyzCSqL/EbcOG0xUh3vbfYtMBp7jENgD405+3SEhPY4PlqUmc+HDtB7FUcHz4y7wGWJRGyQzNnjJ6Tv0Ajdz5mfJubWVIvHjcRqkxTVtUKt50o00xZ62M0ZzQkDTIHQEsZly0XeHAgSvNzWkmjt9BiBrZ9OkoWVkRpSrCBy/EcpDNPCTSfSzOQ0Nq1ePFjkkW1n8QWDW9Pdb+/7/P2y9E2S8CT+nXBkRQeQiO86Qf1Ireg7k9TA5VYisVZ6bEXEc9UV0mAojpSsC7zWhVlbAoltN6ZbjKmqy/wqn2QIcJemcSie0JigzKpdw7l8FPT2lCRyTKlYLpRyKXzSkNI=
    on:
      branch: master
      tags: false
      condition: $TRAVIS_PYTHON_VERSION = "3.5"
notifications:
  email:
    recipients:
      - beelit94@gmail.com
CHANGELOG.md: 20 changes

@@ -1,6 +1,24 @@
 # Changelog
+## [0.9.1]
+1. [#10] log handler error on Linux environment
+1. [#11] Fix reading state file for remote state and support backend config for
+   init command
+
 ## [0.9.0]
-### Fixed
 1. [#12] Output function doesn't accept parameter 'module'
 1. [#16] Handle empty space/special characters when passing string to command line options
 1. Tested with terraform 0.10.0
+
+## [0.10.0]
+1. [#27] No interaction for apply function
+1. [#18] Return access to the subprocess so output can be handled as desired
+1. [#24] Full support for output(); support for raise_on_error
+
+## [0.10.1]
+1. [#48] adding extension for temp file to adopt the change in terraform 0.12.0
+1. [#49] add workspace support
+
+## [1.0.1]
+1. adding option to output the latest cmd
+1. added refresh command
+1. intenden to work with tf1.0
CONTRIBUTING.md (new empty file)

@@ -1,3 +1,3 @@
 Please see README at github_
 
-.. _github: https://github.com/beelit94/python-terraform/blob/master/README.md
+.. _github: https://github.com/DomainDrivenArchitecture/python-terraform/blob/develop/README.md
README.md: 38 changes

@@ -1,18 +1,21 @@
+# dda-python-terraform
+[![pipeline status](https://gitlab.com/domaindrivenarchitecture/dda-python-terraform/badges/master/pipeline.svg)](https://gitlab.com/domaindrivenarchitecture/dda-python-terraform/-/commits/main)
+
+[<img src="https://domaindrivenarchitecture.org/img/delta-chat.svg" width=20 alt="DeltaChat"> chat over e-mail](mailto:buero@meissa-gmbh.de?subject=community-chat) | [<img src="https://meissa.de/images/parts/contact/mastodon36_hue9b2464f10b18e134322af482b9c915e_5501_filter_14705073121015236177.png" width=20 alt="M"> meissa@social.meissa-gmbh.de](https://social.meissa-gmbh.de/@meissa) | [Blog](https://domaindrivenarchitecture.org) | [Website](https://meissa.de)
+
+
 ## Introduction
 
-python-terraform is a python module provide a wrapper of `terraform` command line tool.
+dda-python-terraform is a python module provide a wrapper of `terraform` command line tool.
 `terraform` is a tool made by Hashicorp, please refer to https://terraform.io/
 
-### Status
-[![Build Status](https://travis-ci.org/beelit94/python-terraform.svg?branch=develop)](https://travis-ci.org/beelit94/python-terraform)
-
 ## Installation
 pip install python-terraform
 
 ## Usage
 #### For any terraform command
 
-    from python_terraform import *
+    from dda_python_terraform import *
     t = Terraform()
     return_code, stdout, stderr = t.<cmd_name>(*arguments, **options)
 
@@ -20,13 +23,13 @@ python-terraform is a python module provide a wrapper of `terraform` command lin
 to be able to call the method, you could call cmd_name by adding `_cmd` after command name, for example,
 `import` here could be called by
 
-    from python_terraform import *
+    from dda_python_terraform import *
     t = Terraform()
     return_code, stdout, stderr = t.import_cmd(*arguments, **options)
 
 or just call cmd method directly
 
-    from python_terraform import *
+    from dda_python_terraform import *
     t = Terraform()
     return_code, stdout, stderr = t.cmd(<cmd_name>, *arguments, **options)
 
@@ -79,7 +82,7 @@ simply pass the string to arguments of the method, for example,
 By default, stdout and stderr are captured and returned. This causes the application to appear to hang. To print terraform output in real time, provide the `capture_output` option with any value other than `None`. This will cause the output of terraform to be printed to the terminal in real time. The value of `stdout` and `stderr` below will be `None`.
 
 
-    from python_terraform import Terraform
+    from dda_python_terraform import Terraform
     t = Terraform()
     return_code, stdout, stderr = t.<cmd_name>(capture_output=False)
 
@@ -93,19 +96,19 @@ In shell:
 
 In python-terraform:
 
-    from python_terraform import *
+    from dda_python_terraform import *
     tf = Terraform(working_dir='/home/test')
     tf.apply(no_color=IsFlagged, refresh=False, var={'a':'b', 'c':'d'})
 
 or
 
-    from python_terraform import *
+    from dda_python_terraform import *
     tf = Terraform()
     tf.apply('/home/test', no_color=IsFlagged, refresh=False, var={'a':'b', 'c':'d'})
 
 or
 
-    from python_terraform import *
+    from dda_python_terraform import *
     tf = Terraform(working_dir='/home/test', variables={'a':'b', 'c':'d'})
     tf.apply(no_color=IsFlagged, refresh=False)
 
@@ -117,7 +120,7 @@ In shell:
 
 In python-terraform:
 
-    from python_terraform import *
+    from dda_python_terraform import *
     tf = terraform(working_dir='/home/test')
     tf.fmt(diff=True)
 
@@ -138,10 +141,11 @@ a exhaustive method implementation which I don't prefer to.
 Therefore I end-up with using `IsFlagged` or `IsNotFlagged` as value of option
 like `-no-color` and `True/False` value reserved for option like `refresh=true`
 
+## Development & mirrors
+Development happens at: https://repo.prod.meissa.de/meissa/dda-python-terraform
+
+Mirrors are:
+* https://gitlab.com/domaindrivenarchitecture/dda-python-terraform (CI issues and PR)
+* https://github.com/DomainDrivenArchitecture/dda-python-terraform
+
+For more details about our repository model see: https://repo.prod.meissa.de/meissa/federate-your-repos
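The option conventions described in the README above (IsFlagged for bare flags, IsNotFlagged/None to omit an option, booleans for -option=true/false, a dict for -var) can be summarized in a short sketch. This snippet is illustrative only and not part of the changeset; the working directory path is a placeholder:

```python
# Illustrative sketch of the option conventions documented in the README above.
# Not part of the diff; '/home/test' is a placeholder working directory.
from dda_python_terraform import IsFlagged, IsNotFlagged, Terraform

tf = Terraform(working_dir="/home/test")

# IsFlagged emits a bare flag (-no-color); IsNotFlagged or None drop the option entirely.
tf.plan(no_color=IsFlagged, detailed_exitcode=IsNotFlagged)

# Plain booleans become -option=true / -option=false (here -refresh=false),
# and a dict passed as `var` is written to a temporary -var-file behind the scenes.
tf.apply(refresh=False, var={"a": "b"})
```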
dda_python_terraform/__init__.py (new file, 9 lines)

@@ -0,0 +1,9 @@
"""Module providing wrapper for terraform."""
from .terraform import (
    IsFlagged,
    IsNotFlagged,
    Terraform,
    TerraformCommandError,
    VariableFiles,
)
from .tfstate import Tfstate
dda_python_terraform/terraform.py (new file, 578 lines)

@@ -0,0 +1,578 @@
|
||||||
|
"""Module providing wrapper for terraform."""
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Type, Union
|
||||||
|
from packaging import version
|
||||||
|
|
||||||
|
from dda_python_terraform.tfstate import Tfstate
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
COMMAND_WITH_SUBCOMMANDS = {"workspace"}
|
||||||
|
|
||||||
|
|
||||||
|
class TerraformFlag:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class IsFlagged(TerraformFlag):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class IsNotFlagged(TerraformFlag):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
CommandOutput = Tuple[Optional[int], Optional[str], Optional[str]]
|
||||||
|
|
||||||
|
|
||||||
|
class TerraformCommandError(subprocess.CalledProcessError):
|
||||||
|
"""Class representing a terraform error"""
|
||||||
|
def __init__(self, ret_code: int, cmd: str, out: Optional[str], err: Optional[str]):
|
||||||
|
super().__init__(ret_code, cmd)
|
||||||
|
self.out = out
|
||||||
|
self.err = err
|
||||||
|
logger.error("Error with command %s. Reason: %s", self.cmd, self.err)
|
||||||
|
|
||||||
|
|
||||||
|
class Terraform:
|
||||||
|
"""Wrapper of terraform command line tool.
|
||||||
|
|
||||||
|
https://www.terraform.io/
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
working_dir: Optional[str] = None,
|
||||||
|
targets: Optional[Sequence[str]] = None,
|
||||||
|
state: Optional[str] = None,
|
||||||
|
variables: Optional[Dict[str, str]] = None,
|
||||||
|
parallelism: Optional[str] = None,
|
||||||
|
var_file: Optional[str] = None,
|
||||||
|
terraform_bin_path: Optional[str] = None,
|
||||||
|
is_env_vars_included: bool = True,
|
||||||
|
terraform_semantic_version: Optional[str] = "0.13.0"
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
:param working_dir: the folder of the working folder, if not given,
|
||||||
|
will be current working folder
|
||||||
|
:param targets: list of target
|
||||||
|
as default value of apply/destroy/plan command
|
||||||
|
:param state: path of state file relative to working folder,
|
||||||
|
as a default value of apply/destroy/plan command
|
||||||
|
:param variables: default variables for apply/destroy/plan command,
|
||||||
|
will be override by variable passing by apply/destroy/plan method
|
||||||
|
:param parallelism: default parallelism value for apply/destroy command
|
||||||
|
:param var_file: passed as value of -var-file option,
|
||||||
|
could be string or list, list stands for multiple -var-file option
|
||||||
|
:param terraform_bin_path: binary path of terraform
|
||||||
|
:type is_env_vars_included: bool
|
||||||
|
:param is_env_vars_included: included env variables when calling terraform cmd
|
||||||
|
:param terrform_semantic_version encodes major.minor.patch version of terraform. Defaults to 0.13.0
|
||||||
|
"""
|
||||||
|
self.is_env_vars_included = is_env_vars_included
|
||||||
|
self.working_dir = working_dir
|
||||||
|
self.state = state
|
||||||
|
self.targets = [] if targets is None else targets
|
||||||
|
self.variables = {} if variables is None else variables
|
||||||
|
self.parallelism = parallelism
|
||||||
|
self.terraform_bin_path = (
|
||||||
|
terraform_bin_path if terraform_bin_path else "terraform"
|
||||||
|
)
|
||||||
|
self.terraform_semantic_version = terraform_semantic_version
|
||||||
|
self.var_file = var_file
|
||||||
|
self.temp_var_files = VariableFiles()
|
||||||
|
|
||||||
|
# store the tfstate data
|
||||||
|
self.tfstate = None
|
||||||
|
self.read_state_file(self.state)
|
||||||
|
|
||||||
|
self.latest_cmd = ''
|
||||||
|
|
||||||
|
def apply(
|
||||||
|
self,
|
||||||
|
dir_or_plan: Optional[str] = None,
|
||||||
|
input: bool = False,
|
||||||
|
skip_plan: bool = True,
|
||||||
|
no_color: Type[TerraformFlag] = IsFlagged,
|
||||||
|
**kwargs,
|
||||||
|
) -> CommandOutput:
|
||||||
|
"""Refer to https://terraform.io/docs/commands/apply.html
|
||||||
|
|
||||||
|
no-color is flagged by default
|
||||||
|
:param no_color: disable color of stdout
|
||||||
|
:param input: disable prompt for a missing variable
|
||||||
|
:param dir_or_plan: folder relative to working folder
|
||||||
|
:param skip_plan: force apply without plan (default: false)
|
||||||
|
:param kwargs: same as kwags in method 'cmd'
|
||||||
|
:returns return_code, stdout, stderr
|
||||||
|
"""
|
||||||
|
if not skip_plan:
|
||||||
|
return self.plan(dir_or_plan=dir_or_plan, **kwargs)
|
||||||
|
global_opts = self._generate_default_general_options(dir_or_plan)
|
||||||
|
default = kwargs.copy()
|
||||||
|
default["input"] = input
|
||||||
|
default["no_color"] = no_color
|
||||||
|
default["auto-approve"] = True # a False value will require an input
|
||||||
|
option_dict = self._generate_default_options(default)
|
||||||
|
args = self._generate_default_args(dir_or_plan)
|
||||||
|
return self.cmd(global_opts, "apply", *args, **option_dict)
|
||||||
|
|
||||||
|
def refresh(
|
||||||
|
self,
|
||||||
|
dir_or_plan: Optional[str] = None,
|
||||||
|
input: bool = False,
|
||||||
|
no_color: Type[TerraformFlag] = IsFlagged,
|
||||||
|
**kwargs,
|
||||||
|
) -> CommandOutput:
|
||||||
|
"""Refer to https://terraform.io/docs/commands/refresh.html
|
||||||
|
|
||||||
|
no-color is flagged by default
|
||||||
|
:param no_color: disable color of stdout
|
||||||
|
:param input: disable prompt for a missing variable
|
||||||
|
:param dir_or_plan: folder relative to working folder
|
||||||
|
:param kwargs: same as kwags in method 'cmd'
|
||||||
|
:returns return_code, stdout, stderr
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(dir_or_plan)
|
||||||
|
default = kwargs.copy()
|
||||||
|
default["input"] = input
|
||||||
|
default["no_color"] = no_color
|
||||||
|
option_dict = self._generate_default_options(default)
|
||||||
|
args = self._generate_default_args(dir_or_plan)
|
||||||
|
return self.cmd(global_opts, "refresh", *args, **option_dict)
|
||||||
|
|
||||||
|
def destroy(
|
||||||
|
self,
|
||||||
|
dir_or_plan: Optional[str] = None,
|
||||||
|
force: Type[TerraformFlag] = IsFlagged,
|
||||||
|
**kwargs,
|
||||||
|
) -> CommandOutput:
|
||||||
|
"""Refer to https://www.terraform.io/docs/commands/destroy.html
|
||||||
|
|
||||||
|
force/no-color option is flagged by default
|
||||||
|
:return: ret_code, stdout, stderr
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(dir_or_plan)
|
||||||
|
default = kwargs.copy()
|
||||||
|
# force is no longer a flag in version >= 1.0
|
||||||
|
if version.parse(self.terraform_semantic_version) < version.parse("1.0.0"):
|
||||||
|
default["force"] = force
|
||||||
|
default["auto-approve"] = True
|
||||||
|
options = self._generate_default_options(default)
|
||||||
|
args = self._generate_default_args(dir_or_plan)
|
||||||
|
return self.cmd(global_opts, "destroy", *args, **options)
|
||||||
|
|
||||||
|
def plan(
|
||||||
|
self,
|
||||||
|
dir_or_plan: Optional[str] = None,
|
||||||
|
detailed_exitcode: Type[TerraformFlag] = IsFlagged,
|
||||||
|
**kwargs,
|
||||||
|
) -> CommandOutput:
|
||||||
|
"""Refer to https://www.terraform.io/docs/commands/plan.html
|
||||||
|
|
||||||
|
:param detailed_exitcode: Return a detailed exit code when the command exits.
|
||||||
|
:param dir_or_plan: relative path to plan/folder
|
||||||
|
:param kwargs: options
|
||||||
|
:return: ret_code, stdout, stderr
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(dir_or_plan)
|
||||||
|
options = kwargs.copy()
|
||||||
|
options["detailed_exitcode"] = detailed_exitcode
|
||||||
|
options = self._generate_default_options(options)
|
||||||
|
args = self._generate_default_args(dir_or_plan)
|
||||||
|
return self.cmd(global_opts, "plan", *args, **options)
|
||||||
|
|
||||||
|
def init(
|
||||||
|
self,
|
||||||
|
dir_or_plan: Optional[str] = None,
|
||||||
|
backend_config: Optional[Dict[str, str]] = None,
|
||||||
|
reconfigure: Type[TerraformFlag] = IsFlagged,
|
||||||
|
backend: bool = True,
|
||||||
|
**kwargs,
|
||||||
|
) -> CommandOutput:
|
||||||
|
"""Refer to https://www.terraform.io/docs/commands/init.html
|
||||||
|
|
||||||
|
By default, this assumes you want to use backend config, and tries to
|
||||||
|
init fresh. The flags -reconfigure and -backend=true are default.
|
||||||
|
|
||||||
|
:param dir_or_plan: relative path to the folder want to init
|
||||||
|
:param backend_config: a dictionary of backend config options. eg.
|
||||||
|
t = Terraform()
|
||||||
|
t.init(backend_config={'access_key': 'myaccesskey',
|
||||||
|
'secret_key': 'mysecretkey', 'bucket': 'mybucketname'})
|
||||||
|
:param reconfigure: whether or not to force reconfiguration of backend
|
||||||
|
:param backend: whether or not to use backend settings for init
|
||||||
|
:param kwargs: options
|
||||||
|
:return: ret_code, stdout, stderr
|
||||||
|
"""
|
||||||
|
options = kwargs.copy()
|
||||||
|
options.update(
|
||||||
|
{
|
||||||
|
"backend_config": backend_config,
|
||||||
|
"reconfigure": reconfigure,
|
||||||
|
"backend": backend,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
options = self._generate_default_options(options)
|
||||||
|
global_opts = self._generate_default_general_options(dir_or_plan)
|
||||||
|
args = self._generate_default_args(dir_or_plan)
|
||||||
|
return self.cmd(global_opts, "init", *args, **options)
|
||||||
|
|
||||||
|
def generate_cmd_string(self, global_options: Dict[str, Any], cmd: str, *args, **kwargs) -> List[str]:
|
||||||
|
"""For any generate_cmd_string doesn't written as public method of Terraform
|
||||||
|
|
||||||
|
examples:
|
||||||
|
1. call import command,
|
||||||
|
ref to https://www.terraform.io/docs/commands/import.html
|
||||||
|
--> generate_cmd_string call:
|
||||||
|
terraform import -input=true aws_instance.foo i-abcd1234
|
||||||
|
--> python call:
|
||||||
|
tf.generate_cmd_string('import', 'aws_instance.foo', 'i-abcd1234', input=True)
|
||||||
|
|
||||||
|
2. call apply command,
|
||||||
|
--> generate_cmd_string call:
|
||||||
|
terraform apply -var='a=b' -var='c=d' -no-color the_folder
|
||||||
|
--> python call:
|
||||||
|
tf.generate_cmd_string('apply', the_folder, no_color=IsFlagged, var={'a':'b', 'c':'d'})
|
||||||
|
|
||||||
|
:param cmd: command and sub-command of terraform, seperated with space
|
||||||
|
refer to https://www.terraform.io/docs/commands/index.html
|
||||||
|
:param args: arguments of a command
|
||||||
|
:param kwargs: same as kwags in method 'cmd'
|
||||||
|
:return: string of valid terraform command
|
||||||
|
"""
|
||||||
|
cmds = [self.terraform_bin_path]
|
||||||
|
cmds += self._generate_cmd_options(**global_options)
|
||||||
|
cmds += cmd.split()
|
||||||
|
if cmd in COMMAND_WITH_SUBCOMMANDS:
|
||||||
|
args = list(args)
|
||||||
|
subcommand = args.pop(0)
|
||||||
|
cmds.append(subcommand)
|
||||||
|
|
||||||
|
cmds += self._generate_cmd_options(**kwargs)
|
||||||
|
|
||||||
|
cmds += args
|
||||||
|
self.latest_cmd = ' '.join(cmds)
|
||||||
|
return cmds
|
||||||
|
|
||||||
|
def cmd(
|
||||||
|
self,
|
||||||
|
global_opts: Dict[str, Any],
|
||||||
|
cmd: str,
|
||||||
|
*args,
|
||||||
|
capture_output: Union[bool, str] = True,
|
||||||
|
raise_on_error: bool = True,
|
||||||
|
synchronous: bool = True,
|
||||||
|
**kwargs,
|
||||||
|
) -> CommandOutput:
|
||||||
|
"""Run a terraform command, if success, will try to read state file
|
||||||
|
|
||||||
|
:param cmd: command and sub-command of terraform, seperated with space
|
||||||
|
refer to https://www.terraform.io/docs/commands/index.html
|
||||||
|
:param args: arguments of a command
|
||||||
|
:param kwargs: any option flag with key value without prefixed dash character
|
||||||
|
if there's a dash in the option name, use under line instead of dash,
|
||||||
|
ex. -no-color --> no_color
|
||||||
|
if it's a simple flag with no value, value should be IsFlagged
|
||||||
|
ex. cmd('taint', allow_missing=IsFlagged)
|
||||||
|
if it's a boolean value flag, assign True or false
|
||||||
|
if it's a flag could be used multiple times, assign list to it's value
|
||||||
|
if it's a "var" variable flag, assign dictionary to it
|
||||||
|
if a value is None, will skip this option
|
||||||
|
if the option 'capture_output' is passed (with any value other than
|
||||||
|
True), terraform output will be printed to stdout/stderr and
|
||||||
|
"None" will be returned as out and err.
|
||||||
|
if the option 'raise_on_error' is passed (with any value that evaluates to True),
|
||||||
|
and the terraform command returns a nonzerop return code, then
|
||||||
|
a TerraformCommandError exception will be raised. The exception object will
|
||||||
|
have the following properties:
|
||||||
|
returncode: The command's return code
|
||||||
|
out: The captured stdout, or None if not captured
|
||||||
|
err: The captured stderr, or None if not captured
|
||||||
|
:return: ret_code, out, err
|
||||||
|
"""
|
||||||
|
if capture_output is True:
|
||||||
|
stderr = subprocess.PIPE
|
||||||
|
stdout = subprocess.PIPE
|
||||||
|
elif capture_output == "framework":
|
||||||
|
stderr = None
|
||||||
|
stdout = None
|
||||||
|
else:
|
||||||
|
stderr = sys.stderr
|
||||||
|
stdout = sys.stdout
|
||||||
|
|
||||||
|
cmds = self.generate_cmd_string(global_opts, cmd, *args, **kwargs)
|
||||||
|
logger.info("Command: %s", " ".join(cmds))
|
||||||
|
|
||||||
|
working_folder = self.working_dir if self.working_dir else None
|
||||||
|
|
||||||
|
environ_vars = {}
|
||||||
|
if self.is_env_vars_included:
|
||||||
|
environ_vars = os.environ.copy()
|
||||||
|
|
||||||
|
proc = subprocess.Popen(
|
||||||
|
cmds, stdout=stdout, stderr=stderr, cwd=working_folder, env=environ_vars
|
||||||
|
)
|
||||||
|
|
||||||
|
if not synchronous:
|
||||||
|
return None, None, None
|
||||||
|
|
||||||
|
out, err = proc.communicate()
|
||||||
|
ret_code = proc.returncode
|
||||||
|
logger.info("output: %s", out)
|
||||||
|
|
||||||
|
if ret_code == 0:
|
||||||
|
self.read_state_file()
|
||||||
|
else:
|
||||||
|
logger.warning("error: %s", err)
|
||||||
|
|
||||||
|
self.temp_var_files.clean_up()
|
||||||
|
if capture_output is True:
|
||||||
|
out = out.decode()
|
||||||
|
err = err.decode()
|
||||||
|
else:
|
||||||
|
out = None
|
||||||
|
err = None
|
||||||
|
|
||||||
|
if ret_code and raise_on_error:
|
||||||
|
raise TerraformCommandError(ret_code, " ".join(cmds), out=out, err=err)
|
||||||
|
|
||||||
|
return ret_code, out, err
|
||||||
|
|
||||||
|
def output(
|
||||||
|
self, dir_or_plan: Optional[str] = None, *args, capture_output: bool = True, **kwargs
|
||||||
|
) -> Union[None, str, Dict[str, str], Dict[str, Dict[str, str]]]:
|
||||||
|
"""Refer https://www.terraform.io/docs/commands/output.html
|
||||||
|
|
||||||
|
Note that this method does not conform to the (ret_code, out, err) return
|
||||||
|
convention. To use the "output" command with the standard convention,
|
||||||
|
call "output_cmd" instead of "output".
|
||||||
|
|
||||||
|
:param args: Positional arguments. There is one optional positional
|
||||||
|
argument NAME; if supplied, the returned output text
|
||||||
|
will be the json for a single named output value.
|
||||||
|
:param kwargs: Named options, passed to the command. In addition,
|
||||||
|
'full_value': If True, and NAME is provided, then
|
||||||
|
the return value will be a dict with
|
||||||
|
"value', 'type', and 'sensitive'
|
||||||
|
properties.
|
||||||
|
:return: None, if an error occured
|
||||||
|
Output value as a string, if NAME is provided and full_value
|
||||||
|
is False or not provided
|
||||||
|
Output value as a dict with 'value', 'sensitive', and 'type' if
|
||||||
|
NAME is provided and full_value is True.
|
||||||
|
dict of named dicts each with 'value', 'sensitive', and 'type',
|
||||||
|
if NAME is not provided
|
||||||
|
"""
|
||||||
|
kwargs["json"] = IsFlagged
|
||||||
|
if capture_output is False:
|
||||||
|
raise ValueError("capture_output is required for this method")
|
||||||
|
|
||||||
|
global_opts = self._generate_default_general_options(dir_or_plan)
|
||||||
|
ret, out, _ = self.cmd(global_opts, "output", *args, **kwargs)
|
||||||
|
|
||||||
|
# ret, out, _ = self.output_cmd(global_opts, *args, **kwargs)
|
||||||
|
|
||||||
|
if ret:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return json.loads(out.lstrip())
|
||||||
|
|
||||||
|
def read_state_file(self, file_path=None) -> None:
|
||||||
|
"""Read .tfstate file
|
||||||
|
|
||||||
|
:param file_path: relative path to working dir
|
||||||
|
:return: states file in dict type
|
||||||
|
"""
|
||||||
|
|
||||||
|
working_dir = self.working_dir or ""
|
||||||
|
|
||||||
|
file_path = file_path or self.state or ""
|
||||||
|
|
||||||
|
if not file_path:
|
||||||
|
backend_path = os.path.join(file_path, ".terraform", "terraform.tfstate")
|
||||||
|
|
||||||
|
if os.path.exists(os.path.join(working_dir, backend_path)):
|
||||||
|
file_path = backend_path
|
||||||
|
else:
|
||||||
|
file_path = os.path.join(file_path, "terraform.tfstate")
|
||||||
|
|
||||||
|
file_path = os.path.join(working_dir, file_path)
|
||||||
|
|
||||||
|
self.tfstate = Tfstate.load_file(file_path)
|
||||||
|
|
||||||
|
def set_workspace(self, workspace, *args, **kwargs) -> CommandOutput:
|
||||||
|
"""Set workspace
|
||||||
|
|
||||||
|
:param workspace: the desired workspace.
|
||||||
|
:return: status
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(False)
|
||||||
|
return self.cmd(global_opts, "workspace", "select", workspace, *args, **kwargs)
|
||||||
|
|
||||||
|
def create_workspace(self, workspace, *args, **kwargs) -> CommandOutput:
|
||||||
|
"""Create workspace
|
||||||
|
|
||||||
|
:param workspace: the desired workspace.
|
||||||
|
:return: status
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(False)
|
||||||
|
return self.cmd(global_opts, "workspace", "new", workspace, *args, **kwargs)
|
||||||
|
|
||||||
|
def delete_workspace(self, workspace, *args, **kwargs) -> CommandOutput:
|
||||||
|
"""Delete workspace
|
||||||
|
|
||||||
|
:param workspace: the desired workspace.
|
||||||
|
:return: status
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(False)
|
||||||
|
return self.cmd(global_opts, "workspace", "delete", workspace, *args, **kwargs)
|
||||||
|
|
||||||
|
def show_workspace(self, **kwargs) -> CommandOutput:
|
||||||
|
"""Show workspace, this command does not need the [DIR] part
|
||||||
|
|
||||||
|
:return: workspace
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(False)
|
||||||
|
return self.cmd(global_opts, "workspace", "show", **kwargs)
|
||||||
|
|
||||||
|
def list_workspace(self) -> List[str]:
|
||||||
|
"""List of workspaces
|
||||||
|
|
||||||
|
:return: workspaces
|
||||||
|
:example:
|
||||||
|
>>> tf = Terraform()
|
||||||
|
>>> tf.list_workspace()
|
||||||
|
['default', 'test']
|
||||||
|
"""
|
||||||
|
global_opts = self._generate_default_general_options(False)
|
||||||
|
return list(
|
||||||
|
filter(
|
||||||
|
lambda workspace: len(workspace) > 0,
|
||||||
|
map(
|
||||||
|
lambda workspace: workspace.strip('*').strip(),
|
||||||
|
(self.cmd(global_opts, "workspace", "list")[1] or '').split()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _generate_default_args(self, dir_or_plan: Optional[str]) -> Sequence[str]:
|
||||||
|
if (version.parse(self.terraform_semantic_version) < version.parse("1.0.0") and dir_or_plan):
|
||||||
|
return [dir_or_plan]
|
||||||
|
elif (version.parse(self.terraform_semantic_version) >= version.parse("1.0.0") and dir_or_plan and os.path.isfile(f'{self.working_dir}/{dir_or_plan}')):
|
||||||
|
plan = dir_or_plan.split('/')[-1]
|
||||||
|
return [plan]
|
||||||
|
else:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _generate_default_general_options(self, dir_or_plan: Optional[str]) -> Dict[str, Any]:
|
||||||
|
if (version.parse(self.terraform_semantic_version) >= version.parse("1.0.0") and dir_or_plan):
|
||||||
|
if os.path.isdir(self.working_dir + '/' + dir_or_plan):
|
||||||
|
return {"chdir": dir_or_plan}
|
||||||
|
else:
|
||||||
|
plan_path = dir_or_plan.split('/')
|
||||||
|
dir_to_plan_path = "/".join(plan_path[:-1])
|
||||||
|
return {"chdir": dir_to_plan_path}
|
||||||
|
else:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def _generate_default_options(
|
||||||
|
self, input_options: Dict[str, Any]
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
return {
|
||||||
|
"state": self.state,
|
||||||
|
"target": self.targets,
|
||||||
|
"var": self.variables,
|
||||||
|
"var_file": self.var_file,
|
||||||
|
"parallelism": self.parallelism,
|
||||||
|
"no_color": IsFlagged,
|
||||||
|
"input": False,
|
||||||
|
**input_options,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _generate_cmd_options(self, **kwargs) -> List[str]:
|
||||||
|
result = []
|
||||||
|
|
||||||
|
for option, value in kwargs.items():
|
||||||
|
if "_" in option:
|
||||||
|
option = option.replace("_", "-")
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
for sub_v in value:
|
||||||
|
result += [f"-{option}={sub_v}"]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if isinstance(value, dict):
|
||||||
|
if "backend-config" in option:
|
||||||
|
for backend_key, backend_value in value.items():
|
||||||
|
result += [f"-backend-config={backend_key}={backend_value}"]
|
||||||
|
continue
|
||||||
|
# since map type sent in string won't work, create temp var file for
|
||||||
|
# variables, and clean it up later
|
||||||
|
if option == "var":
|
||||||
|
# We do not create empty var-files if there is no var passed.
|
||||||
|
# An empty var-file would result in an error: An argument or block definition is required here
|
||||||
|
if value:
|
||||||
|
filename = self.temp_var_files.create(value)
|
||||||
|
result += [f"-var-file={filename}"]
|
||||||
|
|
||||||
|
continue
|
||||||
|
|
||||||
|
# simple flag,
|
||||||
|
if value is IsFlagged:
|
||||||
|
result += [f"-{option}"]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if value is None or value is IsNotFlagged:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if isinstance(value, bool):
|
||||||
|
value = "true" if value else "false"
|
||||||
|
|
||||||
|
result += [f"-{option}={value}"]
|
||||||
|
return result
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_value, traceback) -> None:
|
||||||
|
self.temp_var_files.clean_up()
|
||||||
|
|
||||||
|
def __getattr__(self, item: str) -> Callable:
|
||||||
|
def wrapper(*args, **kwargs):
|
||||||
|
cmd_name = str(item)
|
||||||
|
if cmd_name.endswith("_cmd"):
|
||||||
|
cmd_name = cmd_name[:-4]
|
||||||
|
logger.debug("called with %r and %r", args, kwargs)
|
||||||
|
global_opts = self._generate_default_general_options(False)
|
||||||
|
return self.cmd(global_opts, cmd_name, *args, **kwargs)
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
class VariableFiles:
|
||||||
|
"""Class representing a terraform var files"""
|
||||||
|
def __init__(self):
|
||||||
|
self.files = []
|
||||||
|
|
||||||
|
def create(self, variables: Dict[str, str]) -> str:
|
||||||
|
"""create var file in temp"""
|
||||||
|
with tempfile.NamedTemporaryFile(
|
||||||
|
"w+t", suffix=".tfvars.json", delete=False
|
||||||
|
) as temp:
|
||||||
|
logger.debug("%s is created", temp.name)
|
||||||
|
self.files.append(temp)
|
||||||
|
logger.debug("variables wrote to tempfile: %s", variables)
|
||||||
|
temp.write(json.dumps(variables))
|
||||||
|
file_name = temp.name
|
||||||
|
|
||||||
|
return file_name
|
||||||
|
|
||||||
|
def clean_up(self):
|
||||||
|
"""cleanup the var file"""
|
||||||
|
for fle in self.files:
|
||||||
|
os.unlink(fle.name)
|
||||||
|
|
||||||
|
self.files = []
|
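As a quick orientation to the new Terraform class above, here is a minimal usage sketch. It is illustrative only and not part of the diff; the working directory, terraform version, and bucket name are placeholder values:

```python
# Minimal usage sketch for the Terraform wrapper defined above (illustrative only;
# '/home/test', '1.1.3' and 'mybucketname' are placeholders).
from dda_python_terraform import Terraform

tf = Terraform(working_dir="/home/test", terraform_semantic_version="1.1.3")

# A backend_config dict is expanded into repeated -backend-config=key=value options.
tf.init(backend_config={"bucket": "mybucketname"})

# apply auto-approves by default and returns (return_code, stdout, stderr).
return_code, stdout, stderr = tf.apply()

# output() runs `terraform output -json` and returns the parsed result, or None on error.
outputs = tf.output()
```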
dda_python_terraform/tfstate.py (new file, 35 lines)

@@ -0,0 +1,35 @@
"""Helper Module providing wrapper for terraform state."""
import json
import logging
import os
from typing import Dict, Optional

logger = logging.getLogger(__name__)


class Tfstate:
    """Class representing a terraform state"""
    def __init__(self, data: Optional[Dict[str, str]] = None):
        self.tfstate_file: Optional[str] = None
        self.native_data = data
        if data:
            self.__dict__ = data

    @staticmethod
    def load_file(file_path: str) -> "Tfstate":
        """Read the tfstate file and load its contents.

        Parses then as JSON and put the result into the object.
        """
        logger.debug("read data from %s", file_path)
        if os.path.exists(file_path):
            with open(file_path, encoding="utf-8") as fle:
                json_data = json.load(fle)

            tf_state = Tfstate(json_data)
            tf_state.tfstate_file = file_path
            return tf_state

        logger.debug("%s does not exist", file_path)

        return Tfstate()
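A small sketch of how Tfstate.load_file above is typically used. It is illustrative only: the file name is a placeholder, and it assumes the state JSON contains a top-level "version" key, which standard terraform state files do:

```python
# Sketch of Tfstate usage (assumes a local terraform.tfstate file; top-level JSON
# keys such as "version" become attributes because __init__ assigns data to __dict__).
from dda_python_terraform import Tfstate

state = Tfstate.load_file("terraform.tfstate")
if state.tfstate_file:            # set only when the file was found and parsed
    print(state.version)          # e.g. the state format version from the JSON
else:
    print("no state file found")
```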
doc/releasing.md (new file, 12 lines)

@@ -0,0 +1,12 @@
## Release

```
adjust version number in setup.py to release version number.
git commit -am "release"
git tag -am "release" release-[release version no]
git push --follow-tags
increase version no in setup.py
git commit -am "version bump"
git push
pip3 install --upgrade --user dda-python-terraform
```
Deleted file (310 lines):

@@ -1,310 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# above is for compatibility of python2.7.11
|
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
from python_terraform.tfstate import Tfstate
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class IsFlagged:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class IsNotFlagged:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Terraform(object):
|
|
||||||
"""
|
|
||||||
Wrapper of terraform command line tool
|
|
||||||
https://www.terraform.io/
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, working_dir=None,
|
|
||||||
targets=None,
|
|
||||||
state=None,
|
|
||||||
variables=None,
|
|
||||||
parallelism=None,
|
|
||||||
var_file=None,
|
|
||||||
terraform_bin_path=None,
|
|
||||||
is_env_vars_included=True):
|
|
||||||
"""
|
|
||||||
:param working_dir: the folder of the working folder, if not given,
|
|
||||||
will be current working folder
|
|
||||||
:param targets: list of target
|
|
||||||
as default value of apply/destroy/plan command
|
|
||||||
:param state: path of state file relative to working folder,
|
|
||||||
as a default value of apply/destroy/plan command
|
|
||||||
:param variables: default variables for apply/destroy/plan command,
|
|
||||||
will be override by variable passing by apply/destroy/plan method
|
|
||||||
:param parallelism: default parallelism value for apply/destroy command
|
|
||||||
:param var_file: passed as value of -var-file option,
|
|
||||||
could be string or list, list stands for multiple -var-file option
|
|
||||||
:param terraform_bin_path: binary path of terraform
|
|
||||||
:type is_env_vars_included: bool
|
|
||||||
:param is_env_vars_included: included env variables when calling terraform cmd
|
|
||||||
"""
|
|
||||||
self.is_env_vars_included = is_env_vars_included
|
|
||||||
self.working_dir = working_dir
|
|
||||||
self.state = state
|
|
||||||
self.targets = [] if targets is None else targets
|
|
||||||
self.variables = dict() if variables is None else variables
|
|
||||||
self.parallelism = parallelism
|
|
||||||
self.terraform_bin_path = terraform_bin_path \
|
|
||||||
if terraform_bin_path else 'terraform'
|
|
||||||
self.var_file = var_file
|
|
||||||
self.temp_var_files = VariableFiles()
|
|
||||||
|
|
||||||
# store the tfstate data
|
|
||||||
self.tfstate = None
|
|
||||||
self.read_state_file(self.state)
|
|
||||||
|
|
||||||
def __getattr__(self, item):
|
|
||||||
def wrapper(*args, **kwargs):
|
|
||||||
cmd_name = str(item)
|
|
||||||
if cmd_name.endswith('_cmd'):
|
|
||||||
cmd_name = cmd_name[:-4]
|
|
||||||
logging.debug('called with %r and %r' % (args, kwargs))
|
|
||||||
return self.cmd(cmd_name, *args, **kwargs)
|
|
||||||
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
def apply(self, dir_or_plan=None, input=False, no_color=IsFlagged, **kwargs):
|
|
||||||
"""
|
|
||||||
refer to https://terraform.io/docs/commands/apply.html
|
|
||||||
no-color is flagged by default
|
|
||||||
:param no_color: disable color of stdout
|
|
||||||
:param input: disable prompt for a missing variable
|
|
||||||
:param dir_or_plan: folder relative to working folder
|
|
||||||
:param kwargs: same as kwags in method 'cmd'
|
|
||||||
:returns return_code, stdout, stderr
|
|
||||||
"""
|
|
||||||
default = kwargs
|
|
||||||
default['input'] = input
|
|
||||||
default['no_color'] = no_color
|
|
||||||
option_dict = self._generate_default_options(default)
|
|
||||||
args = self._generate_default_args(dir_or_plan)
|
|
||||||
return self.cmd('apply', *args, **option_dict)
|
|
||||||
|
|
||||||
def _generate_default_args(self, dir_or_plan):
|
|
||||||
return [dir_or_plan] if dir_or_plan else []
|
|
||||||
|
|
||||||
def _generate_default_options(self, input_options):
|
|
||||||
option_dict = dict()
|
|
||||||
option_dict['state'] = self.state
|
|
||||||
option_dict['target'] = self.targets
|
|
||||||
option_dict['var'] = self.variables
|
|
||||||
option_dict['var_file'] = self.var_file
|
|
||||||
option_dict['parallelism'] = self.parallelism
|
|
||||||
option_dict['no_color'] = IsFlagged
|
|
||||||
option_dict['input'] = False
|
|
||||||
option_dict.update(input_options)
|
|
||||||
return option_dict
|
|
||||||
|
|
||||||
def destroy(self, dir_or_plan=None, force=IsFlagged, **kwargs):
|
|
||||||
"""
|
|
||||||
refer to https://www.terraform.io/docs/commands/destroy.html
|
|
||||||
force/no-color option is flagged by default
|
|
||||||
:return: ret_code, stdout, stderr
|
|
||||||
"""
|
|
||||||
default = kwargs
|
|
||||||
default['force'] = force
|
|
||||||
options = self._generate_default_options(default)
|
|
||||||
args = self._generate_default_args(dir_or_plan)
|
|
||||||
return self.cmd('destroy', *args, **options)
|
|
||||||
|
|
||||||
def plan(self, dir_or_plan=None, detailed_exitcode=IsFlagged, **kwargs):
|
|
||||||
"""
|
|
||||||
refert to https://www.terraform.io/docs/commands/plan.html
|
|
||||||
:param detailed_exitcode: Return a detailed exit code when the command exits.
|
|
||||||
:param dir_or_plan: relative path to plan/folder
|
|
||||||
:param kwargs: options
|
|
||||||
:return: ret_code, stdout, stderr
|
|
||||||
"""
|
|
||||||
options = kwargs
|
|
||||||
options['detailed_exitcode'] = detailed_exitcode
|
|
||||||
options = self._generate_default_options(options)
|
|
||||||
args = self._generate_default_args(dir_or_plan)
|
|
||||||
return self.cmd('plan', *args, **options)
|
|
||||||
|
|
||||||
def generate_cmd_string(self, cmd, *args, **kwargs):
|
|
||||||
"""
|
|
||||||
for any generate_cmd_string doesn't written as public method of terraform
|
|
||||||
|
|
||||||
examples:
|
|
||||||
1. call import command,
|
|
||||||
ref to https://www.terraform.io/docs/commands/import.html
|
|
||||||
--> generate_cmd_string call:
|
|
||||||
terraform import -input=true aws_instance.foo i-abcd1234
|
|
||||||
--> python call:
|
|
||||||
tf.generate_cmd_string('import', 'aws_instance.foo', 'i-abcd1234', input=True)
|
|
||||||
|
|
||||||
2. call apply command,
|
|
||||||
--> generate_cmd_string call:
|
|
||||||
terraform apply -var='a=b' -var='c=d' -no-color the_folder
|
|
||||||
--> python call:
|
|
||||||
tf.generate_cmd_string('apply', the_folder, no_color=IsFlagged, var={'a':'b', 'c':'d'})
|
|
||||||
|
|
||||||
:param cmd: command and sub-command of terraform, seperated with space
|
|
||||||
refer to https://www.terraform.io/docs/commands/index.html
|
|
||||||
:param args: arguments of a command
|
|
||||||
:param kwargs: same as kwags in method 'cmd'
|
|
||||||
:return: string of valid terraform command
|
|
||||||
"""
|
|
||||||
cmds = cmd.split()
|
|
||||||
cmds = [self.terraform_bin_path] + cmds
|
|
||||||
|
|
||||||
for k, v in kwargs.items():
|
|
||||||
if '_' in k:
|
|
||||||
k = k.replace('_', '-')
|
|
||||||
|
|
||||||
if type(v) is list:
|
|
||||||
for sub_v in v:
|
|
||||||
cmds += ['-{k}={v}'.format(k=k, v=sub_v)]
|
|
||||||
continue
|
|
||||||
|
|
||||||
# right now we assume only variables will be passed as dict
|
|
||||||
# since map type sent in string won't work, create temp var file for
|
|
||||||
# variables, and clean it up later
|
|
||||||
if type(v) is dict:
|
|
||||||
filename = self.temp_var_files.create(v)
|
|
||||||
cmds += ['-var-file={0}'.format(filename)]
|
|
||||||
continue
|
|
||||||
|
|
||||||
# simple flag,
|
|
||||||
if v is IsFlagged:
|
|
||||||
cmds += ['-{k}'.format(k=k)]
|
|
||||||
continue
|
|
||||||
|
|
||||||
if v is None or v is IsNotFlagged:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if type(v) is bool:
|
|
||||||
v = 'true' if v else 'false'
|
|
||||||
|
|
||||||
cmds += ['-{k}={v}'.format(k=k, v=v)]
|
|
||||||
|
|
||||||
cmds += args
|
|
||||||
return cmds
|
|
||||||
|
|
||||||
def cmd(self, cmd, *args, **kwargs):
|
|
||||||
"""
|
|
||||||
run a terraform command, if success, will try to read state file
|
|
||||||
:param cmd: command and sub-command of terraform, seperated with space
|
|
||||||
refer to https://www.terraform.io/docs/commands/index.html
|
|
||||||
:param args: arguments of a command
|
|
||||||
:param kwargs: any option flag with key value without prefixed dash character
|
|
||||||
if there's a dash in the option name, use under line instead of dash,
|
|
||||||
ex. -no-color --> no_color
|
|
||||||
if it's a simple flag with no value, value should be IsFlagged
|
|
||||||
ex. cmd('taint', allow_missing=IsFlagged)
|
|
||||||
if it's a boolean value flag, assign True or false
|
|
||||||
if it's a flag could be used multiple times, assign list to it's value
|
|
||||||
if it's a "var" variable flag, assign dictionary to it
|
|
||||||
if a value is None, will skip this option
|
|
||||||
if the option 'capture_output' is passed (with any value other than
|
|
||||||
True), terraform output will be printed to stdout/stderr and
|
|
||||||
"None" will be returned as out and err.
|
|
||||||
:return: ret_code, out, err
|
|
||||||
"""
|
|
||||||
|
|
||||||
capture_output = kwargs.pop('capture_output', True)
|
|
||||||
if capture_output is True:
|
|
||||||
stderr = subprocess.PIPE
|
|
||||||
stdout = subprocess.PIPE
|
|
||||||
else:
|
|
||||||
stderr = sys.stderr
|
|
||||||
stdout = sys.stdout
|
|
||||||
|
|
||||||
cmds = self.generate_cmd_string(cmd, *args, **kwargs)
|
|
||||||
log.debug('command: {c}'.format(c=' '.join(cmds)))
|
|
||||||
|
|
||||||
working_folder = self.working_dir if self.working_dir else None
|
|
||||||
|
|
||||||
environ_vars = {}
|
|
||||||
if self.is_env_vars_included:
|
|
||||||
environ_vars = os.environ.copy()
|
|
||||||
|
|
||||||
p = subprocess.Popen(cmds, stdout=stdout, stderr=stderr,
|
|
||||||
cwd=working_folder, env=environ_vars)
|
|
||||||
out, err = p.communicate()
|
|
||||||
ret_code = p.returncode
|
|
||||||
log.debug('output: {o}'.format(o=out))
|
|
||||||
|
|
||||||
if ret_code == 0:
|
|
||||||
self.read_state_file()
|
|
||||||
else:
|
|
||||||
log.warn('error: {e}'.format(e=err))
|
|
||||||
|
|
||||||
self.temp_var_files.clean_up()
|
|
||||||
if capture_output is True:
|
|
||||||
return ret_code, out.decode('utf-8'), err.decode('utf-8')
|
|
||||||
else:
|
|
||||||
return ret_code, None, None
    def output(self, name, *args, **kwargs):
        """
        https://www.terraform.io/docs/commands/output.html
        :param name: name of output
        :return: output value
        """

        ret, out, err = self.cmd(
            'output', name, json=IsFlagged, *args, **kwargs)

        log.debug('output raw string: {0}'.format(out))
        if ret != 0:
            return None
        out = out.lstrip()

        output_dict = json.loads(out)
        return output_dict['value']
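A short sketch of how this removed helper was called (assuming a previously applied configuration that defines an output named "test_output"):

    tf = Terraform(working_dir='.')
    # runs `terraform output -json test_output` and returns the parsed "value" field
    value = tf.output('test_output')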
    def read_state_file(self, file_path=None):
        """
        read the .tfstate file
        :param file_path: relative path to the working dir
        :return: state file content as a dict
        """

        if not file_path:
            file_path = self.state

        if not file_path:
            file_path = 'terraform.tfstate'

        if self.working_dir:
            file_path = os.path.join(self.working_dir, file_path)

        self.tfstate = Tfstate.load_file(file_path)

    def __exit__(self, exc_type, exc_value, traceback):
        self.temp_var_files.clean_up()


class VariableFiles(object):
    def __init__(self):
        self.files = []

    def create(self, variables):
        with tempfile.NamedTemporaryFile('w+t', delete=False) as temp:
            log.debug('{0} is created'.format(temp.name))
            self.files.append(temp)
            log.debug('variables written to tempfile: {0}'.format(str(variables)))
            temp.write(json.dumps(variables))
            file_name = temp.name

        return file_name

    def clean_up(self):
        for f in self.files:
            os.unlink(f.name)

        self.files = []
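A minimal sketch of how the class above backs the dict-to-var-file handling in generate_cmd_string (the temp file name is whatever NamedTemporaryFile picks):

    vf = VariableFiles()
    path = vf.create({'test_map_var': {'a': 'a'}})  # writes the dict as JSON to a temp file
    # the caller appends '-var-file=' + path to the terraform command line
    vf.clean_up()                                   # unlinks every temp file after the run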
@@ -1,34 +0,0 @@
# -*- coding: utf-8 -*-
# above is for compatibility with python 2.7.11

import json
import os
import logging

log = logging.getLogger(__name__)


class Tfstate(object):
    def __init__(self, data=None):
        self.tfstate_file = None
        self.native_data = data
        if data:
            self.__dict__ = data

    @staticmethod
    def load_file(file_path):
        """
        Read the tfstate file, parse its contents as JSON and put the result into the object
        """
        log.debug('read data from {0}'.format(file_path))
        if os.path.exists(file_path):
            with open(file_path) as f:
                json_data = json.load(f)

            tf_state = Tfstate(json_data)
            tf_state.tfstate_file = file_path
            return tf_state

        log.debug('{0} does not exist'.format(file_path))

        return Tfstate()
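A minimal sketch of the removed Tfstate helper in use (assuming a terraform.tfstate file exists in the current directory):

    state = Tfstate.load_file('terraform.tfstate')
    if state.tfstate_file:                  # stays None when the file does not exist
        print(state.modules[0]['path'])     # raw JSON keys become attributes via self.__dict__ = data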
@@ -1,3 +1 @@
-tox-pyenv
-pytest
-tox
+packaging
11 requirements_dev.txt (new file)
@@ -0,0 +1,11 @@
+coverage==5.3
+flake8==3.8.4
+flake8-polyfill==1.0.2
+mypy==0.790
+mypy-extensions==0.4.3
+pycodestyle==2.6.0
+pyflakes==2.2.0
+pylint==2.6.0
+pytest==6.1.2
+pytest-cov==2.10.1
+pytest-datafiles==2.0
@@ -1,2 +1,10 @@
 [wheel]
 universal = 1
+
+[isort]
+line_length=88
+known_third_party=
+indent=' '
+multi_line_output=3
+sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
+include_trailing_comma=true
48 setup.py
@@ -7,12 +7,13 @@ except ImportError:
     from distutils.core import setup

 dependencies = []
-module_name = 'python-terraform'
-short_description = 'This is a python module provide a wrapper ' \
-                    'of terraform command line tool'
+module_name = "dda-python-terraform"
+short_description = (
+    "This is a python module provide a wrapper " "of terraform command line tool"
+)

 try:
-    with open('DESCRIPTION.rst') as f:
+    with open("DESCRIPTION.rst") as f:
         long_description = f.read()
 except IOError:
     long_description = short_description
@@ -20,36 +21,37 @@ except IOError:

 setup(
     name=module_name,
-    version='0.9.0',
-    url='https://github.com/beelit94/python-terraform',
-    license='MIT',
-    author='Freddy Tan',
-    author_email='beelit94@gmail.com',
+    version="2.1.2-dev",
+    url="https://repo.prod.meissa.de/meissa/dda-python-terraform",
+    license="MIT",
+    author="Freddy Tan, meissa team",
+    author_email="buero@meissa.de",
     description=short_description,
     long_description=long_description,
-    packages=['python_terraform'],
+    packages=["dda_python_terraform"],
     package_data={},
-    platforms='any',
+    platforms="any",
     install_requires=dependencies,
+    tests_require=["pytest"],
+    python_requires=">=3.6",
     classifiers=[
         # As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
         # 'Development Status :: 1 - Planning',
         # 'Development Status :: 2 - Pre-Alpha',
         # 'Development Status :: 3 - Alpha',
-        'Development Status :: 4 - Beta',
+        "Development Status :: 4 - Beta",
         # 'Development Status :: 5 - Production/Stable',
         # 'Development Status :: 6 - Mature',
         # 'Development Status :: 7 - Inactive',
-        'Environment :: Console',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: MIT License',
-        'Operating System :: POSIX',
-        'Operating System :: MacOS',
-        'Operating System :: Unix',
+        "Environment :: Console",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: MIT License",
+        "Operating System :: POSIX",
+        "Operating System :: MacOS",
+        "Operating System :: Unix",
         # 'Operating System :: Windows',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 3',
-        'Topic :: Software Development :: Libraries :: Python Modules',
-    ]
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Topic :: Software Development :: Libraries :: Python Modules",
+    ],
 )
@@ -5,12 +5,12 @@ variable "test_var" {
 provider "archive" {}

 variable "test_list_var" {
-  type = "list"
+  type = list(string)
   default = ["a", "b"]
 }

 variable "test_map_var" {
-  type = "map"
+  type = map

   default = {
     "a" = "a"
@@ -19,13 +19,13 @@ variable "test_map_var" {
 }

 output "test_output" {
-  value = "${var.test_var}"
+  value = var.test_var
 }

 output "test_list_output" {
-  value = "${var.test_list_var}"
+  value = var.test_list_var
 }

 output "test_map_output" {
-  value = "${var.test_map_var}"
+  value = var.test_map_var
 }
@@ -1,70 +1,209 @@
-try:
-    from cStringIO import StringIO  # Python 2
-except ImportError:
-    from io import StringIO
-from python_terraform import *
-import pytest
-import os
-import logging
-import re
-import shutil
-import fnmatch
+import fnmatch
+import logging
+import os
+import re
+import shutil
+from contextlib import contextmanager
+from io import StringIO
+from typing import Callable
+
+from packaging import version
+
+import pytest
+from _pytest.logging import LogCaptureFixture, caplog
+
+from dda_python_terraform import IsFlagged, IsNotFlagged, Terraform, TerraformCommandError

 logging.basicConfig(level=logging.DEBUG)
 root_logger = logging.getLogger()

 current_path = os.path.dirname(os.path.realpath(__file__))

+semantic_version = os.environ.get("TFVER")
+
 FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS = "test 'test.out!"
 STRING_CASES = [
     [
-        lambda x: x.generate_cmd_string('apply', 'the_folder',
-                                        no_color=IsFlagged),
-        "terraform apply -no-color the_folder"
+        lambda x: x.generate_cmd_string(
+            {}, "apply", "the_folder", no_color=IsFlagged),
+        "terraform apply -no-color the_folder",
     ],
     [
-        lambda x: x.generate_cmd_string('push', 'path', vcs=True,
-                                        token='token',
-                                        atlas_address='url'),
-        "terraform push -vcs=true -token=token -atlas-address=url path"
+        lambda x: x.generate_cmd_string({},
+            "push", "path", vcs=True, token="token", atlas_address="url"
+        ),
+        "terraform push -vcs=true -token=token -atlas-address=url path",
+    ],
+    [
+        lambda x: x.generate_cmd_string({},
+            "refresh", "path", token="token"
+        ),
+        "terraform refresh -token=token path",
     ],
 ]

-CMD_CASES = [
-    ['method', 'expected_output', 'expected_ret_code', 'expected_logs', 'folder'],
+CMD_CASES_0_x = [
+    [
+        "method",
+        "expected_output",
+        "expected_ret_code",
+        "expected_exception",
+        "expected_logs",
+        "folder",
+    ],
     [
         [
-            lambda x: x.cmd('plan', 'var_to_output', no_color=IsFlagged, var={'test_var': 'test'}) ,
-            "doesn't need to do anything",
+            lambda x: x.cmd(
+                {},
+                "plan",
+                "var_to_output",
+                no_color=IsFlagged,
+                var={"test_var": "test"},
+                raise_on_error=False,
+            ),
+            # Expected output varies by terraform semantic_version
+            "Plan: 0 to add, 0 to change, 0 to destroy.",
             0,
-            '',
-            'var_to_output'
+            False,
+            "",
+            "var_to_output",
         ],
         # try import aws instance
         [
-            lambda x: x.cmd('import', 'aws_instance.foo', 'i-abcd1234', no_color=IsFlagged),
-            '',
+            lambda x: x.cmd(
+                {},
+                "import",
+                "aws_instance.foo",
+                "i-abcd1234",
+                no_color=IsFlagged,
+                raise_on_error=False,
+            ),
+            "",
             1,
-            'command: terraform import -no-color aws_instance.foo i-abcd1234',
-            ''
+            False,
+            "Error: No Terraform configuration files",
+            "",
         ],
         # test with space and special character in file path
         [
-            lambda x: x.cmd('plan', 'var_to_output', out=FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS),
-            '',
+            lambda x: x.cmd(
+                {},
+                "plan",
+                "var_to_output",
+                out=FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS,
+                raise_on_error=False,
+            ),
+            "",
             0,
-            '',
-            'var_to_output'
-        ]
-    ]
+            False,
+            "",
+            "var_to_output",
+        ],
+        # test workspace command (commands with subcommand)
+        [
+            lambda x: x.cmd(
+                {}, "workspace", "show", no_color=IsFlagged, raise_on_error=False
+            ),
+            "",
+            0,
+            False,
+            "Command: terraform workspace show -no-color",
+            "",
+        ],
+    ],
 ]

+CMD_CASES_1_x = [
+    [
+        "method",
+        "expected_output",
+        "expected_ret_code",
+        "expected_exception",
+        "expected_logs",
+        "folder",
+    ],
+    [
+        [
+            lambda x: x.cmd(
+                {"chdir": "var_to_output"},
+                "plan",
+                no_color=IsFlagged,
+                var={"test_var": "test"},
+                raise_on_error=False,
+            ),
+            # Expected output varies by terraform semantic_version
+            "Changes to Outputs:",
+            0,
+            False,
+            "",
+            "var_to_output",
+        ],
+        # try import aws instance
+        [
+            lambda x: x.cmd(
+                {},
+                "import",
+                "aws_instance.foo",
+                "i-abcd1234",
+                no_color=IsFlagged,
+                raise_on_error=False,
+            ),
+            "",
+            1,
+            False,
+            "Error: No Terraform configuration files",
+            "",
+        ],
+        # test with space and special character in file path
+        [
+            lambda x: x.cmd(
+                {"chdir": "var_to_output"},
+                "plan",
+                out=FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS,
+                raise_on_error=False,
+            ),
+            "",
+            0,
+            False,
+            "",
+            "var_to_output",
+        ],
+        # test workspace command (commands with subcommand)
+        [
+            lambda x: x.cmd(
+                {}, "workspace", "show", no_color=IsFlagged, raise_on_error=False
+            ),
+            "",
+            0,
+            False,
+            "Command: terraform workspace show -no-color",
+            "",
+        ],
+    ],
+]
+
+APPLY_CASES_0_x = [
+    ["folder", "variables", "var_files", "expected_output", "options"],
+    [("var_to_output", {"test_var": "test"}, None, 'test_output=test', {}),
+     ("var_to_output", {"test_list_var": ["c", "d"]}, None, 'test_list_output=["c","d",]', {},),
+     ("var_to_output", {"test_map_var": {"c": "c", "d": "d"}}, None, 'test_map_output={"c"="c""d"="d"}', {},),
+     ("var_to_output", {"test_map_var": {"c": "c", "d": "d"}}, "var_to_output/test_map_var.json", 'test_map_output={"e"="e""f"="f"}', {},),
+     ("var_to_output", {}, None, "\x1b[0m\x1b[1m\x1b[32mApplycomplete!", {"no_color": IsNotFlagged},), ]]
+
+APPLY_CASES_1_x = [
+    ["folder", "variables", "var_files", "expected_output", "options"],
+    [("var_to_output", {"test_var": "test"}, None, 'test_output="test"', {}),
+     ("var_to_output", {"test_list_var": ["c", "d"]}, None, 'test_list_output=tolist(["c","d",])', {},),
+     ("var_to_output", {"test_map_var": {"c": "c", "d": "d"}}, None, 'test_map_output=tomap({"c"="c""d"="d"})', {},),
+     ("var_to_output", {"test_map_var": {"c": "c", "d": "d"}}, "test_map_var.json", 'test_map_output=tomap({"e"="e""f"="f"})', {},),
+     ("var_to_output", {}, None, "\x1b[0m\x1b[1m\x1b[32mApplycomplete!", {"no_color": IsNotFlagged},), ]]
+

-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def fmt_test_file(request):
-    target = os.path.join(current_path, 'bad_fmt', 'test.backup')
-    orgin = os.path.join(current_path, 'bad_fmt', 'test.tf')
-    shutil.copy(orgin,
-                target)
+    target = os.path.join(current_path, "bad_fmt", "test.backup")
+    orgin = os.path.join(current_path, "bad_fmt", "test.tf")
+    shutil.copy(orgin, target)

     def td():
         shutil.move(target, orgin)
@@ -73,28 +212,51 @@ def fmt_test_file(request):
     return

+# @pytest.fixture()
+# def string_logger(request) -> Callable[..., str]:
+#     log_stream = StringIO()
+#     handler = logging.StreamHandler(log_stream)
+#     root_logger.addHandler(handler)
+
+#     def td():
+#         root_logger.removeHandler(handler)
+#         log_stream.close()
+
+#     request.addfinalizer(td)
+#     return lambda: str(log_stream.getvalue())
+

 @pytest.fixture()
-def string_logger(request):
-    log_stream = StringIO()
-    handler = logging.StreamHandler(log_stream)
-    root_logger.addHandler(handler)
-
-    def td():
-        root_logger.removeHandler(handler)
-        log_stream.close()
-
-    request.addfinalizer(td)
-    return lambda: str(log_stream.getvalue())
-
-
-class TestTerraform(object):
-    def teardown_method(self, method):
-        """ teardown any state that was previously setup with a setup_method
-        call.
-        """
-
-        def purge(dir, pattern):
+def workspace_setup_teardown():
+    """Fixture used in workspace related tests.
+
+    Create and tear down a workspace
+    *Use as a contextmanager*
+    """
+
+    @contextmanager
+    def wrapper(workspace_name, create=True, delete=True, *args, **kwargs):
+        tf = Terraform(working_dir=current_path, terraform_semantic_version=semantic_version)
+        tf.init()
+        if create:
+            tf.create_workspace(workspace_name, *args, **kwargs)
+        yield tf
+        if delete:
+            tf.set_workspace("default")
+            tf.delete_workspace(workspace_name)
+
+    yield wrapper
+
+
+class TestTerraform:
+    def teardown_method(self, _) -> None:
+        """Teardown any state that was previously setup with a setup_method call."""
+        exclude = ["test_tfstate_file",
+                   "test_tfstate_file2", "test_tfstate_file3"]
+
+        def purge(dir: str, pattern: str) -> None:
             for root, dirnames, filenames in os.walk(dir):
+                dirnames[:] = [d for d in dirnames if d not in exclude]
                 for filename in fnmatch.filter(filenames, pattern):
                     f = os.path.join(root, filename)
                     os.remove(f)
@@ -102,134 +264,294 @@ class TestTerraform(object):
                 d = os.path.join(root, dirname)
                 shutil.rmtree(d)

-        purge('.', '*.tfstate')
-        purge('.', '*.terraform')
-        purge('.', FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS)
+        purge(".", "*.tfstate")
+        purge(".", "*.tfstate.backup")
+        purge(".", "*.terraform")
+        purge(".", FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS)

-    @pytest.mark.parametrize([
-        "method", "expected"
-    ], STRING_CASES)
-    def test_generate_cmd_string(self, method, expected):
-        tf = Terraform(working_dir=current_path)
+    @pytest.mark.parametrize(["method", "expected"], STRING_CASES)
+    def test_generate_cmd_string(self, method: Callable[..., str], expected: str):
+        tf = Terraform(working_dir=current_path, terraform_semantic_version=semantic_version)
         result = method(tf)

         strs = expected.split()
         for s in strs:
             assert s in result

-    @pytest.mark.parametrize(*CMD_CASES)
-    def test_cmd(self, method, expected_output, expected_ret_code, expected_logs, string_logger, folder):
-        tf = Terraform(working_dir=current_path)
-        tf.init(folder)
-        ret, out, err = method(tf)
-        logs = string_logger()
-        logs = logs.replace('\n', '')
+    @pytest.mark.parametrize(*(CMD_CASES_1_x if version.parse(semantic_version) >= version.parse("1.0.0") else CMD_CASES_0_x))
+    def test_cmd(
+        self,
+        method: Callable[..., str],
+        expected_output: str,
+        expected_ret_code: int,
+        expected_exception: bool,
+        expected_logs: str,
+        caplog: LogCaptureFixture,
+        folder: str,
+    ):
+        with caplog.at_level(logging.INFO):
+            tf = Terraform(working_dir=current_path, terraform_semantic_version=semantic_version)
+            tf.init(folder)
+            try:
+                ret, out, _ = method(tf)
+                assert not expected_exception
+            except TerraformCommandError as e:
+                assert expected_exception
+                ret = e.returncode
+                out = e.out

         assert expected_output in out
         assert expected_ret_code == ret
-        assert expected_logs in logs
+        assert expected_logs in caplog.text

-    @pytest.mark.parametrize(
-        ("folder", "variables", "var_files", "expected_output", "options"),
-        [
-            ("var_to_output",
-             {'test_var': 'test'}, None, "test_output=test", {}),
-            ("var_to_output", {'test_list_var': ['c', 'd']}, None, "test_list_output=[c,d]", {}),
-            ("var_to_output", {'test_map_var': {"c": "c", "d": "d"}}, None, "test_map_output={a=ab=bc=cd=d}", {}),
-            ("var_to_output", {'test_map_var': {"c": "c", "d": "d"}}, 'var_to_output/test_map_var.json', "test_map_output={a=ab=bc=cd=de=ef=f}", {}),
-            ("var_to_output", {}, None, "\x1b[0m\x1b[1m\x1b[32mApplycomplete!", {"no_color": IsNotFlagged})
-        ])
+    @pytest.mark.parametrize(*(APPLY_CASES_1_x if version.parse(semantic_version) >= version.parse("1.0.0") else APPLY_CASES_0_x))
     def test_apply(self, folder, variables, var_files, expected_output, options):
-        tf = Terraform(working_dir=current_path, variables=variables, var_file=var_files)
-        # after 0.10.0 we always need to init
+        tf = Terraform(
+            working_dir=current_path, variables=variables, var_file=var_files, terraform_semantic_version=semantic_version
+        )
         tf.init(folder)
         ret, out, err = tf.apply(folder, **options)
         assert ret == 0
-        assert expected_output in out.replace('\n', '').replace(' ', '')
-        assert err == ''
+        assert expected_output in out.replace("\n", "").replace(" ", "")
+        assert err == ""
+
+    def test_apply_plan(self):
+        # test is only applicable to version > 1.0.0
+        if version.parse(semantic_version) < version.parse("1.0.0"):
+            return
+
+        tf = Terraform(
+            working_dir=current_path, terraform_semantic_version=semantic_version
+        )
+        out_folder = 'var_to_output'
+        out_file_name = 'test.out'
+        out_file_path = f'{out_folder}/{out_file_name}'
+        tf.init(out_folder)
+        ret, _, err = tf.plan(out_folder, detailed_exitcode=IsNotFlagged, out=out_file_name)
+        assert ret == 0
+        assert err == ""
+
+        ret, _, err = tf.apply(out_file_path, skip_plan=True)
+        assert ret == 0
+        assert err == ""
+
+    def test_apply_with_var_file(self, caplog: LogCaptureFixture):
+        with caplog.at_level(logging.INFO):
+            tf = Terraform(working_dir=current_path, terraform_semantic_version=semantic_version)
+            folder = "var_to_output"
+            tf.init(folder)
+            tf.apply(
+                folder,
+                var_file=os.path.join(
+                    current_path, "tfvar_files", "test.tfvars"),
+            )
+        for log in caplog.messages:
+            if log.startswith("Command: terraform apply"):
+                assert log.count("-var-file=") == 1

     @pytest.mark.parametrize(
-        ['cmd', 'args', 'options'],
+        ["cmd", "args", "options"],
         [
             # bool value
-            ('fmt', ['bad_fmt'], {'list': False, 'diff': False})
-        ]
+            ("fmt", ["bad_fmt"], {"list": False, "diff": False})
+        ],
     )
     def test_options(self, cmd, args, options, fmt_test_file):
-        tf = Terraform(working_dir=current_path)
+        tf = Terraform(working_dir=current_path, terraform_semantic_version=semantic_version)
         ret, out, err = getattr(tf, cmd)(*args, **options)
         assert ret == 0
-        assert out == ''
+        assert out == ""

     def test_state_data(self):
-        cwd = os.path.join(current_path, 'test_tfstate_file')
-        tf = Terraform(working_dir=cwd, state='tfstate.test')
+        cwd = os.path.join(current_path, "test_tfstate_file")
+        tf = Terraform(working_dir=cwd, state="tfstate.test",
+                       terraform_semantic_version=semantic_version)
         tf.read_state_file()
-        assert tf.tfstate.modules[0]['path'] == ['root']
+        assert tf.tfstate.modules[0]["path"] == ["root"]
+
+    def test_state_default(self):
+        cwd = os.path.join(current_path, "test_tfstate_file2")
+        tf = Terraform(working_dir=cwd, terraform_semantic_version=semantic_version)
+        tf.read_state_file()
+        assert tf.tfstate.modules[0]["path"] == ["default"]
+
+    def test_state_default_backend(self):
+        cwd = os.path.join(current_path, "test_tfstate_file3")
+        tf = Terraform(working_dir=cwd, terraform_semantic_version=semantic_version)
+        tf.read_state_file()
+        assert tf.tfstate.modules[0]["path"] == ["default_backend"]

     def test_pre_load_state_data(self):
-        cwd = os.path.join(current_path, 'test_tfstate_file')
-        tf = Terraform(working_dir=cwd, state='tfstate.test')
-        assert tf.tfstate.modules[0]['path'] == ['root']
+        cwd = os.path.join(current_path, "test_tfstate_file")
+        tf = Terraform(working_dir=cwd, state="tfstate.test",
+                       terraform_semantic_version=semantic_version)
+        assert tf.tfstate.modules[0]["path"] == ["root"]

     @pytest.mark.parametrize(
-        ("folder", 'variables'),
-        [
-            ("var_to_output", {'test_var': 'test'})
-        ]
+        ("folder", "variables"), [("var_to_output", {"test_var": "test"})]
     )
     def test_override_default(self, folder, variables):
-        tf = Terraform(working_dir=current_path, variables=variables)
+        tf = Terraform(working_dir=current_path,
+                       variables=variables, terraform_semantic_version=semantic_version)
         tf.init(folder)
-        ret, out, err = tf.apply(folder, var={'test_var': 'test2'},
-                                 no_color=IsNotFlagged)
-        out = out.replace('\n', '')
-        assert '\x1b[0m\x1b[1m\x1b[32mApply' in out
-        out = tf.output('test_output')
-        assert 'test2' in out
-
-    @pytest.mark.parametrize(
-        ("param"),
-        [
-            ({}),
-            ({'module': 'test2'}),
-        ]
-    )
-    def test_output(self, param, string_logger):
-        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
-        tf.init('var_to_output')
-        tf.apply('var_to_output')
-        result = tf.output('test_output', **param)
-        regex = re.compile("terraform output (-module=test2 -json|-json -module=test2) test_output")
-        log_str = string_logger()
-        if param:
-            assert re.search(regex, log_str), log_str
+        ret, out, err = tf.apply(
+            folder, var={"test_var": "test2"}, no_color=IsNotFlagged,
+        )
+        out = out.replace("\n", "")
+        assert "\x1b[0m\x1b[1m\x1b[32mApply" in out
+        out = tf.output(folder, "test_output")
+        assert "test2" in out
+
+    @pytest.mark.parametrize("output_all", [True, False])
+    def test_output(self, caplog: LogCaptureFixture, output_all: bool):
+        expected_value = "test"
+        required_output = "test_output"
+        with caplog.at_level(logging.INFO):
+            tf = Terraform(
+                working_dir=current_path, variables={"test_var": expected_value}, terraform_semantic_version=semantic_version
+            )
+            tf.init("var_to_output")
+            tf.apply("var_to_output")
+            params = tuple() if output_all else (required_output,)
+            result = tf.output("var_to_output", *params)
+        if output_all:
+            assert result[required_output]["value"] == expected_value
         else:
-            assert result == 'test'
+            assert result == expected_value
+        assert expected_value in caplog.messages[-1]

     def test_destroy(self):
-        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
-        tf.init('var_to_output')
-        ret, out, err = tf.destroy('var_to_output')
+        tf = Terraform(working_dir=current_path, variables={
+                       "test_var": "test"}, terraform_semantic_version=semantic_version)
+        tf.init("var_to_output")
+        ret, out, err = tf.destroy("var_to_output")
         assert ret == 0
-        assert 'Destroy complete! Resources: 0 destroyed.' in out
+        assert "Destroy complete! Resources: 0 destroyed." in out

     @pytest.mark.parametrize(
-        ("plan", "variables", "expected_ret"),
-        [
-            ('vars_require_input', {}, 1)
-        ]
+        ("plan", "variables", "expected_ret"), [("vars_require_input", {}, 1)]
     )
     def test_plan(self, plan, variables, expected_ret):
-        tf = Terraform(working_dir=current_path, variables=variables)
-        ret, out, err = tf.plan(plan)
-        assert ret == expected_ret
+        tf = Terraform(working_dir=current_path,
+                       variables=variables, terraform_semantic_version=semantic_version)
+        tf.init(plan)
+        with pytest.raises(TerraformCommandError) as e:
+            tf.plan(plan)
+        assert (
+            "\nError:" in e.value.err
+        )

     def test_fmt(self, fmt_test_file):
-        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
+        tf = Terraform(working_dir=current_path, variables={
+                       "test_var": "test"}, terraform_semantic_version=semantic_version)
         ret, out, err = tf.fmt(diff=True)
         assert ret == 0

-    def test_import(self, string_logger):
-        tf = Terraform(working_dir=current_path)
-        tf.import_cmd('aws_instance.foo', 'i-abc1234', no_color=IsFlagged)
-        assert 'command: terraform import -no-color aws_instance.foo i-abc1234' in string_logger()
+    def test_create_workspace(self, workspace_setup_teardown):
+        workspace_name = "test"
+        with workspace_setup_teardown(workspace_name, create=False) as tf:
+            ret, out, err = tf.create_workspace("test")
+        assert ret == 0
+        assert err == ""
+
+    # The directory flag is no longer supported in v1.0.8
+    # this should either be done with -chdir or probably just be removed
+    """
+    def test_create_workspace_with_args(self, workspace_setup_teardown, caplog):
+        workspace_name = "test"
+        state_file_path = os.path.join(
+            current_path, "test_tfstate_file2", "terraform.tfstate"
+        )
+        with workspace_setup_teardown(
+            workspace_name, create=False
+        ) as tf, caplog.at_level(logging.INFO):
+            ret, out, err = tf.create_workspace(
+                "test", current_path, no_color=IsFlagged
+            )
+
+        assert ret == 0
+        assert err == ""
+        assert (
+            f"Command: terraform workspace new -no-color test {current_path}"
+            in caplog.messages
+        )
+    """
+
+    def test_set_workspace(self, workspace_setup_teardown):
+        workspace_name = "test"
+        with workspace_setup_teardown(workspace_name) as tf:
+            ret, out, err = tf.set_workspace(workspace_name)
+        assert ret == 0
+        assert err == ""
+
+    # see comment on test_create_workspace_with_args
+    """
+    def test_set_workspace_with_args(self, workspace_setup_teardown, caplog):
+        workspace_name = "test"
+        with workspace_setup_teardown(workspace_name) as tf, caplog.at_level(
+            logging.INFO
+        ):
+            ret, out, err = tf.set_workspace(
+                workspace_name, current_path, no_color=IsFlagged
+            )
+
+        assert ret == 0
+        assert err == ""
+        assert (
+            f"Command: terraform workspace select -no-color test {current_path}"
+            in caplog.messages
+        )
+    """
+
+    def test_show_workspace(self, workspace_setup_teardown):
+        workspace_name = "test"
+        with workspace_setup_teardown(workspace_name) as tf:
+            ret, out, err = tf.show_workspace()
+        assert ret == 0
+        assert err == ""
+
+    def test_show_workspace_with_no_color(self, workspace_setup_teardown, caplog):
+        workspace_name = "test"
+        with workspace_setup_teardown(workspace_name) as tf, caplog.at_level(
+            logging.INFO
+        ):
+            ret, out, err = tf.show_workspace(no_color=IsFlagged)
+
+        assert ret == 0
+        assert err == ""
+        assert "Command: terraform workspace show -no-color" in caplog.messages
+
+    def test_delete_workspace(self, workspace_setup_teardown):
+        workspace_name = "test"
+        with workspace_setup_teardown(workspace_name, delete=False) as tf:
+            tf.set_workspace("default")
+            ret, out, err = tf.delete_workspace(workspace_name)
+        assert ret == 0
+        assert err == ""
+
+    # see above comments
+    """
+    def test_delete_workspace_with_args(self, workspace_setup_teardown, caplog):
+        workspace_name = "test"
+        with workspace_setup_teardown(
+            workspace_name, delete=False
+        ) as tf, caplog.at_level(logging.INFO):
+            tf.set_workspace("default")
+            ret, out, err = tf.delete_workspace(
+                workspace_name, current_path, force=IsFlagged,
+            )
+
+        assert ret == 0
+        assert err == ""
+        assert (
+            f"Command: terraform workspace delete -force test {current_path}"
+            in caplog.messages
+        )
+    """
+
+    def test_list_workspace(self):
+        tf = Terraform(working_dir=current_path)
+        workspaces = tf.list_workspace()
+        assert len(workspaces) > 0
+        assert 'default' in workspaces
62 test/test_tfstate_file2/terraform.tfstate (new file)
@@ -0,0 +1,62 @@
{
    "version": 3,
    "terraform_version": "0.7.10",
    "serial": 0,
    "lineage": "d03ecdf7-8be0-4593-a952-1d8127875119",
    "modules": [
        {
            "path": [
                "default"
            ],
            "outputs": {},
            "resources": {
                "aws_instance.ubuntu-1404": {
                    "type": "aws_instance",
                    "depends_on": [],
                    "primary": {
                        "id": "i-84d10edb",
                        "attributes": {
                            "ami": "ami-9abea4fb",
                            "associate_public_ip_address": "true",
                            "availability_zone": "us-west-2b",
                            "disable_api_termination": "false",
                            "ebs_block_device.#": "0",
                            "ebs_optimized": "false",
                            "ephemeral_block_device.#": "0",
                            "iam_instance_profile": "",
                            "id": "i-84d10edb",
                            "instance_state": "running",
                            "instance_type": "t2.micro",
                            "key_name": "",
                            "monitoring": "false",
                            "network_interface_id": "eni-46544f07",
                            "private_dns": "ip-172-31-25-244.us-west-2.compute.internal",
                            "private_ip": "172.31.25.244",
                            "public_dns": "ec2-35-162-30-219.us-west-2.compute.amazonaws.com",
                            "public_ip": "35.162.30.219",
                            "root_block_device.#": "1",
                            "root_block_device.0.delete_on_termination": "true",
                            "root_block_device.0.iops": "100",
                            "root_block_device.0.volume_size": "8",
                            "root_block_device.0.volume_type": "gp2",
                            "security_groups.#": "0",
                            "source_dest_check": "true",
                            "subnet_id": "subnet-d2c0f0a6",
                            "tags.%": "0",
                            "tenancy": "default",
                            "vpc_security_group_ids.#": "1",
                            "vpc_security_group_ids.619359045": "sg-9fc7dcfd"
                        },
                        "meta": {
                            "schema_version": "1"
                        },
                        "tainted": false
                    },
                    "deposed": [],
                    "provider": ""
                }
            },
            "depends_on": []
        }
    ]
}
62 test/test_tfstate_file3/.terraform/terraform.tfstate (new file)
@@ -0,0 +1,62 @@
{
    "version": 3,
    "terraform_version": "0.7.10",
    "serial": 0,
    "lineage": "d03ecdf7-8be0-4593-a952-1d8127875119",
    "modules": [
        {
            "path": [
                "default_backend"
            ],
            "outputs": {},
            "resources": {
                "aws_instance.ubuntu-1404": {
                    "type": "aws_instance",
                    "depends_on": [],
                    "primary": {
                        "id": "i-84d10edb",
                        "attributes": {
                            "ami": "ami-9abea4fb",
                            "associate_public_ip_address": "true",
                            "availability_zone": "us-west-2b",
                            "disable_api_termination": "false",
                            "ebs_block_device.#": "0",
                            "ebs_optimized": "false",
                            "ephemeral_block_device.#": "0",
                            "iam_instance_profile": "",
                            "id": "i-84d10edb",
                            "instance_state": "running",
                            "instance_type": "t2.micro",
                            "key_name": "",
                            "monitoring": "false",
                            "network_interface_id": "eni-46544f07",
                            "private_dns": "ip-172-31-25-244.us-west-2.compute.internal",
                            "private_ip": "172.31.25.244",
                            "public_dns": "ec2-35-162-30-219.us-west-2.compute.amazonaws.com",
                            "public_ip": "35.162.30.219",
                            "root_block_device.#": "1",
                            "root_block_device.0.delete_on_termination": "true",
                            "root_block_device.0.iops": "100",
                            "root_block_device.0.volume_size": "8",
                            "root_block_device.0.volume_type": "gp2",
                            "security_groups.#": "0",
                            "source_dest_check": "true",
                            "subnet_id": "subnet-d2c0f0a6",
                            "tags.%": "0",
                            "tenancy": "default",
                            "vpc_security_group_ids.#": "1",
                            "vpc_security_group_ids.619359045": "sg-9fc7dcfd"
                        },
                        "meta": {
                            "schema_version": "1"
                        },
                        "tainted": false
                    },
                    "deposed": [],
                    "provider": ""
                }
            },
            "depends_on": []
        }
    ]
}
1 test/tfvar_files/test.tfvars (new file)
@@ -0,0 +1 @@
+test_var = "True"
@@ -5,12 +5,12 @@ variable "test_var" {
 provider "archive" {}

 variable "test_list_var" {
-  type = "list"
+  type = list(string)
   default = ["a", "b"]
 }

 variable "test_map_var" {
-  type = "map"
+  type = map

   default = {
     "a" = "a"
@@ -19,13 +19,13 @@ variable "test_map_var" {
 }

 output "test_output" {
-  value = "${var.test_var}"
+  value = var.test_var
 }

 output "test_list_output" {
-  value = "${var.test_list_var}"
+  value = var.test_list_var
 }

 output "test_map_output" {
-  value = "${var.test_map_var}"
+  value = var.test_map_var
 }
@@ -1,16 +1,16 @@
 variable "ami" {
   default = "foo"
-  type = "string"
+  type = string
 }

 variable "list" {
   default = []
-  type = "list"
+  type = list
 }

 variable "map" {
   default = {}
-  type = "map"
+  type = map
 }

 resource "aws_instance" "bar" {
12 tox.ini
@@ -1,12 +0,0 @@
-# content of: tox.ini , put in same dir as setup.py
-[tox]
-envlist = py27, py35, py36
-[testenv]
-deps=pytest
-commands=py.test test
-
-[travis]
-python =
-    2.7: py27
-    3.5: py35
-    3.6: py36