Add pre-commit and apply Black format
parent 61a76803d4
commit 151c5dc92a
12 changed files with 321 additions and 259 deletions
@@ -4,4 +4,3 @@ commit = True
tag = False

[bumpversion:file:setup.py]
28 .pre-commit-config.yaml (new file)

@@ -0,0 +1,28 @@
default_language_version:
  python: python3.6
fail_fast: true
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0 # v2.1.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-docstring-first
      - id: check-json
      - id: check-merge-conflict
      - id: check-toml
      - id: check-yaml
      - id: debug-statements
      - id: requirements-txt-fixer
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/lovesegfault/beautysh
    rev: 6.0.1
    hooks:
      - id: beautysh
  - repo: https://github.com/psf/black
    rev: 19.10b0
    hooks:
      - id: black
@@ -137,11 +137,3 @@ This make api caller don't have a general rule to follow but to do
a exhaustive method implementation which I don't prefer to.
Therefore I end-up with using `IsFlagged` or `IsNotFlagged` as value of option
like `-no-color` and `True/False` value reserved for option like `refresh=true`
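Note (added for readers of this diff): the option convention described in the README hunk above is what `generate_cmd_string` implements, and this commit only reformats that code. A minimal Python sketch mirroring the STRING_CASES expectations further down in the test changes, not code introduced by the commit:

```python
# Sketch of the option-to-flag convention, mirroring STRING_CASES below.
from python_terraform import IsFlagged, Terraform

tf = Terraform(working_dir=".")

# IsFlagged turns a keyword into a bare flag; expected command per STRING_CASES:
#   terraform apply -no-color the_folder
tf.generate_cmd_string("apply", "the_folder", no_color=IsFlagged)

# Plain booleans become -key=true / -key=false options; expected command:
#   terraform push -vcs=true -token=token -atlas-address=url path
tf.generate_cmd_string("push", "path", vcs=True, token="token", atlas_address="url")
```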
@@ -1,13 +1,12 @@
import subprocess
import os
import sys
import json
import logging
import os
import subprocess
import sys
import tempfile

from python_terraform.tfstate import Tfstate


logger = logging.getLogger(__name__)

COMMAND_WITH_SUBCOMMANDS = {"workspace"}
@@ -1,6 +1,6 @@
import json
import os
import logging
import os

logger = logging.getLogger(__name__)
@@ -18,7 +18,7 @@ class Tfstate:

        Parses then as JSON and put the result into the object.
        """
        logger.debug('read data from %s', file_path)
        logger.debug("read data from %s", file_path)
        if os.path.exists(file_path):
            with open(file_path) as f:
                json_data = json.load(f)
@@ -27,6 +27,6 @@ class Tfstate:
            tf_state.tfstate_file = file_path
            return tf_state

        logger.debug('%s is not exist', file_path)
        logger.debug("%s is not exist", file_path)

        return Tfstate()
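Note (added for orientation): the state-file handling touched here is exercised by test_state_data further down; a minimal usage sketch along the same lines, using the test fixture paths purely for illustration:

```python
# Mirrors test_state_data below: point the wrapper at a directory holding a
# state file and read it into the parsed Tfstate object.
import os

from python_terraform import Terraform

cwd = os.path.join(os.path.dirname(os.path.realpath(__file__)), "test_tfstate_file")
tf = Terraform(working_dir=cwd, state="tfstate.test")
tf.read_state_file()
print(tf.tfstate.modules[0]["path"])  # ["root"] for the bundled fixture
```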
@@ -1,3 +1,3 @@
tox-pyenv
pytest
tox
tox-pyenv
47 setup.py
@@ -7,12 +7,13 @@ except ImportError:
    from distutils.core import setup

dependencies = []
module_name = 'python-terraform'
short_description = 'This is a python module provide a wrapper ' \
                    'of terraform command line tool'
module_name = "python-terraform"
short_description = (
    "This is a python module provide a wrapper " "of terraform command line tool"
)

try:
    with open('DESCRIPTION.rst') as f:
    with open("DESCRIPTION.rst") as f:
        long_description = f.read()
except IOError:
    long_description = short_description
@@ -20,36 +21,36 @@ except IOError:

setup(
    name=module_name,
    version='0.10.2',
    url='https://github.com/beelit94/python-terraform',
    license='MIT',
    author='Freddy Tan',
    author_email='beelit94@gmail.com',
    version="0.10.2",
    url="https://github.com/beelit94/python-terraform",
    license="MIT",
    author="Freddy Tan",
    author_email="beelit94@gmail.com",
    description=short_description,
    long_description=long_description,
    packages=['python_terraform'],
    packages=["python_terraform"],
    package_data={},
    platforms='any',
    platforms="any",
    install_requires=dependencies,
    classifiers=[
        # As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
        # 'Development Status :: 1 - Planning',
        # 'Development Status :: 2 - Pre-Alpha',
        # 'Development Status :: 3 - Alpha',
        'Development Status :: 4 - Beta',
        "Development Status :: 4 - Beta",
        # 'Development Status :: 5 - Production/Stable',
        # 'Development Status :: 6 - Mature',
        # 'Development Status :: 7 - Inactive',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Operating System :: MacOS',
        'Operating System :: Unix',
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: POSIX",
        "Operating System :: MacOS",
        "Operating System :: Unix",
        # 'Operating System :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ]
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
@@ -2,14 +2,15 @@ try:
    from cStringIO import StringIO  # Python 2
except ImportError:
    from io import StringIO
from python_terraform import *
from contextlib import contextmanager
import pytest
import os
import fnmatch
import logging
import os
import re
import shutil
import fnmatch
from contextlib import contextmanager

import pytest
from python_terraform import *

logging.basicConfig(level=logging.DEBUG)
root_logger = logging.getLogger()
@@ -19,77 +20,96 @@ current_path = os.path.dirname(os.path.realpath(__file__))
FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS = "test 'test.out!"
STRING_CASES = [
    [
        lambda x: x.generate_cmd_string('apply', 'the_folder',
                                        no_color=IsFlagged),
        "terraform apply -no-color the_folder"
        lambda x: x.generate_cmd_string("apply", "the_folder", no_color=IsFlagged),
        "terraform apply -no-color the_folder",
    ],
    [
        lambda x: x.generate_cmd_string('push', 'path', vcs=True,
                                        token='token',
                                        atlas_address='url'),
        "terraform push -vcs=true -token=token -atlas-address=url path"
        lambda x: x.generate_cmd_string(
            "push", "path", vcs=True, token="token", atlas_address="url"
        ),
        "terraform push -vcs=true -token=token -atlas-address=url path",
    ],
]
]

CMD_CASES = [
    ['method', 'expected_output', 'expected_ret_code', 'expected_exception', 'expected_logs', 'folder'],
    [
        "method",
        "expected_output",
        "expected_ret_code",
        "expected_exception",
        "expected_logs",
        "folder",
    ],
    [
        [
            lambda x: x.cmd('plan', 'var_to_output', no_color=IsFlagged, var={'test_var': 'test'}),
            lambda x: x.cmd(
                "plan", "var_to_output", no_color=IsFlagged, var={"test_var": "test"}
            ),
            # Expected output varies by terraform version
            ["doesn't need to do anything",  # Terraform < 0.10.7 (used in travis env)
             "no\nactions need to be performed"],  # Terraform >= 0.10.7
            [
                "doesn't need to do anything",  # Terraform < 0.10.7 (used in travis env)
                "no\nactions need to be performed",
            ],  # Terraform >= 0.10.7
            0,
            False,
            '',
            'var_to_output'
            "",
            "var_to_output",
        ],
        # try import aws instance
        [
            lambda x: x.cmd('import', 'aws_instance.foo', 'i-abcd1234', no_color=IsFlagged),
            '',
            lambda x: x.cmd(
                "import", "aws_instance.foo", "i-abcd1234", no_color=IsFlagged
            ),
            "",
            1,
            False,
            'Command: terraform import -no-color aws_instance.foo i-abcd1234',
            ''
            "Command: terraform import -no-color aws_instance.foo i-abcd1234",
            "",
        ],
        # try import aws instance with raise_on_error
        [
            lambda x: x.cmd('import', 'aws_instance.foo', 'i-abcd1234', no_color=IsFlagged, raise_on_error=True),
            '',
            lambda x: x.cmd(
                "import",
                "aws_instance.foo",
                "i-abcd1234",
                no_color=IsFlagged,
                raise_on_error=True,
            ),
            "",
            1,
            True,
            'Command: terraform import -no-color aws_instance.foo i-abcd1234',
            ''
            "Command: terraform import -no-color aws_instance.foo i-abcd1234",
            "",
        ],
        # test with space and special character in file path
        [
            lambda x: x.cmd('plan', 'var_to_output', out=FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS),
            '',
            lambda x: x.cmd(
                "plan", "var_to_output", out=FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS
            ),
            "",
            0,
            False,
            '',
            'var_to_output'
            "",
            "var_to_output",
        ],
        # test workspace command (commands with subcommand)
        [
            lambda x: x.cmd('workspace', 'show', no_color=IsFlagged),
            '',
            lambda x: x.cmd("workspace", "show", no_color=IsFlagged),
            "",
            0,
            False,
            'Command: terraform workspace show -no-color',
            ''
            "Command: terraform workspace show -no-color",
            "",
        ],
    ],
]
]


@pytest.fixture(scope='function')
@pytest.fixture(scope="function")
def fmt_test_file(request):
    target = os.path.join(current_path, 'bad_fmt', 'test.backup')
    orgin = os.path.join(current_path, 'bad_fmt', 'test.tf')
    shutil.copy(orgin,
                target)
    target = os.path.join(current_path, "bad_fmt", "test.backup")
    orgin = os.path.join(current_path, "bad_fmt", "test.tf")
    shutil.copy(orgin, target)

    def td():
        shutil.move(target, orgin)
@@ -120,6 +140,7 @@ def workspace_setup_teardown():
    Create and tear down a workspace
    *Use as a contextmanager*
    """

    @contextmanager
    def wrapper(workspace_name, create=True, delete=True, *args, **kwargs):
        tf = Terraform(working_dir=current_path)
@@ -128,7 +149,7 @@ def workspace_setup_teardown():
            tf.create_workspace(workspace_name, *args, **kwargs)
        yield tf
        if delete:
            tf.set_workspace('default')
            tf.set_workspace("default")
            tf.delete_workspace(workspace_name)

    yield wrapper
@@ -139,9 +160,7 @@ class TestTerraform(object):
        """ teardown any state that was previously setup with a setup_method
        call.
        """
        exclude = ['test_tfstate_file',
                   'test_tfstate_file2',
                   'test_tfstate_file3']
        exclude = ["test_tfstate_file", "test_tfstate_file2", "test_tfstate_file3"]

        def purge(dir, pattern):
            for root, dirnames, filenames in os.walk(dir):
@@ -153,14 +172,12 @@ class TestTerraform(object):
                d = os.path.join(root, dirname)
                shutil.rmtree(d)

        purge('.', '*.tfstate')
        purge('.', '*.tfstate.backup')
        purge('.', '*.terraform')
        purge('.', FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS)
        purge(".", "*.tfstate")
        purge(".", "*.tfstate.backup")
        purge(".", "*.terraform")
        purge(".", FILE_PATH_WITH_SPACE_AND_SPACIAL_CHARS)

    @pytest.mark.parametrize([
        "method", "expected"
    ], STRING_CASES)
    @pytest.mark.parametrize(["method", "expected"], STRING_CASES)
    def test_generate_cmd_string(self, method, expected):
        tf = Terraform(working_dir=current_path)
        result = method(tf)
@@ -170,7 +187,16 @@ class TestTerraform(object):
            assert s in result

    @pytest.mark.parametrize(*CMD_CASES)
    def test_cmd(self, method, expected_output, expected_ret_code, expected_exception, expected_logs, string_logger, folder):
    def test_cmd(
        self,
        method,
        expected_output,
        expected_ret_code,
        expected_exception,
        expected_logs,
        string_logger,
        folder,
    ):
        tf = Terraform(working_dir=current_path)
        tf.init(folder)
        try:
@@ -183,7 +209,7 @@ class TestTerraform(object):
            err = e.err

        logs = string_logger()
        logs = logs.replace('\n', '')
        logs = logs.replace("\n", "")
        if isinstance(expected_output, list):
            ok = False
            for xo in expected_output:
@@ -200,154 +226,161 @@ class TestTerraform(object):
    @pytest.mark.parametrize(
        ("folder", "variables", "var_files", "expected_output", "options"),
        [
            ("var_to_output",
             {'test_var': 'test'}, None, "test_output=test", {}),
            ("var_to_output", {'test_list_var': ['c', 'd']}, None, "test_list_output=[c,d]", {}),
            ("var_to_output", {'test_map_var': {"c": "c", "d": "d"}}, None, "test_map_output={a=ab=bc=cd=d}", {}),
            ("var_to_output", {'test_map_var': {"c": "c", "d": "d"}}, 'var_to_output/test_map_var.json', "test_map_output={a=ab=bc=cd=de=ef=f}", {}),
            ("var_to_output", {}, None, "\x1b[0m\x1b[1m\x1b[32mApplycomplete!", {"no_color": IsNotFlagged})
        ])
            ("var_to_output", {"test_var": "test"}, None, "test_output=test", {}),
            (
                "var_to_output",
                {"test_list_var": ["c", "d"]},
                None,
                "test_list_output=[c,d]",
                {},
            ),
            (
                "var_to_output",
                {"test_map_var": {"c": "c", "d": "d"}},
                None,
                "test_map_output={a=ab=bc=cd=d}",
                {},
            ),
            (
                "var_to_output",
                {"test_map_var": {"c": "c", "d": "d"}},
                "var_to_output/test_map_var.json",
                "test_map_output={a=ab=bc=cd=de=ef=f}",
                {},
            ),
            (
                "var_to_output",
                {},
                None,
                "\x1b[0m\x1b[1m\x1b[32mApplycomplete!",
                {"no_color": IsNotFlagged},
            ),
        ],
    )
    def test_apply(self, folder, variables, var_files, expected_output, options):
        tf = Terraform(working_dir=current_path, variables=variables, var_file=var_files)
        tf = Terraform(
            working_dir=current_path, variables=variables, var_file=var_files
        )
        # after 0.10.0 we always need to init
        tf.init(folder)
        ret, out, err = tf.apply(folder, **options)
        assert ret == 0
        assert expected_output in out.replace('\n', '').replace(' ', '')
        assert err == ''
        assert expected_output in out.replace("\n", "").replace(" ", "")
        assert err == ""

    def test_apply_with_var_file(self, string_logger):
        tf = Terraform(working_dir=current_path)

        tf.init()
        tf.apply(var_file=os.path.join(current_path, 'tfvar_file', 'test.tfvars'))
        tf.apply(var_file=os.path.join(current_path, "tfvar_file", "test.tfvars"))
        logs = string_logger()
        logs = logs.split('\n')
        logs = logs.split("\n")
        for log in logs:
            if log.startswith('command: terraform apply'):
                assert log.count('-var-file=') == 1
            if log.startswith("command: terraform apply"):
                assert log.count("-var-file=") == 1

    @pytest.mark.parametrize(
        ['cmd', 'args', 'options'],
        ["cmd", "args", "options"],
        [
            # bool value
            ('fmt', ['bad_fmt'], {'list': False, 'diff': False})
        ]
            ("fmt", ["bad_fmt"], {"list": False, "diff": False})
        ],
    )
    def test_options(self, cmd, args, options, fmt_test_file):
        tf = Terraform(working_dir=current_path)
        ret, out, err = getattr(tf, cmd)(*args, **options)
        assert ret == 0
        assert out == ''
        assert out == ""

    def test_state_data(self):
        cwd = os.path.join(current_path, 'test_tfstate_file')
        tf = Terraform(working_dir=cwd, state='tfstate.test')
        cwd = os.path.join(current_path, "test_tfstate_file")
        tf = Terraform(working_dir=cwd, state="tfstate.test")
        tf.read_state_file()
        assert tf.tfstate.modules[0]['path'] == ['root']
        assert tf.tfstate.modules[0]["path"] == ["root"]

    def test_state_default(self):
        cwd = os.path.join(current_path, 'test_tfstate_file2')
        cwd = os.path.join(current_path, "test_tfstate_file2")
        tf = Terraform(working_dir=cwd)
        tf.read_state_file()
        assert tf.tfstate.modules[0]['path'] == ['default']
        assert tf.tfstate.modules[0]["path"] == ["default"]

    def test_state_default_backend(self):
        cwd = os.path.join(current_path, 'test_tfstate_file3')
        cwd = os.path.join(current_path, "test_tfstate_file3")
        tf = Terraform(working_dir=cwd)
        tf.read_state_file()
        assert tf.tfstate.modules[0]['path'] == ['default_backend']
        assert tf.tfstate.modules[0]["path"] == ["default_backend"]

    def test_pre_load_state_data(self):
        cwd = os.path.join(current_path, 'test_tfstate_file')
        tf = Terraform(working_dir=cwd, state='tfstate.test')
        assert tf.tfstate.modules[0]['path'] == ['root']
        cwd = os.path.join(current_path, "test_tfstate_file")
        tf = Terraform(working_dir=cwd, state="tfstate.test")
        assert tf.tfstate.modules[0]["path"] == ["root"]

    @pytest.mark.parametrize(
        ("folder", 'variables'),
        [
            ("var_to_output", {'test_var': 'test'})
        ]
        ("folder", "variables"), [("var_to_output", {"test_var": "test"})]
    )
    def test_override_default(self, folder, variables):
        tf = Terraform(working_dir=current_path, variables=variables)
        tf.init(folder)
        ret, out, err = tf.apply(folder, var={'test_var': 'test2'},
                                 no_color=IsNotFlagged)
        out = out.replace('\n', '')
        assert '\x1b[0m\x1b[1m\x1b[32mApply' in out
        out = tf.output('test_output')
        assert 'test2' in out

    @pytest.mark.parametrize(
        ("param"),
        [
            ({}),
            ({'module': 'test2'}),
        ]
        ret, out, err = tf.apply(
            folder, var={"test_var": "test2"}, no_color=IsNotFlagged
        )
        out = out.replace("\n", "")
        assert "\x1b[0m\x1b[1m\x1b[32mApply" in out
        out = tf.output("test_output")
        assert "test2" in out

    @pytest.mark.parametrize(("param"), [({}), ({"module": "test2"}),])
    def test_output(self, param, string_logger):
        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
        tf.init('var_to_output')
        tf.apply('var_to_output')
        result = tf.output('test_output', **param)
        regex = re.compile("terraform output (-module=test2 -json|-json -module=test2) test_output")
        tf = Terraform(working_dir=current_path, variables={"test_var": "test"})
        tf.init("var_to_output")
        tf.apply("var_to_output")
        result = tf.output("test_output", **param)
        regex = re.compile(
            "terraform output (-module=test2 -json|-json -module=test2) test_output"
        )
        log_str = string_logger()
        if param:
            assert re.search(regex, log_str), log_str
        else:
            assert result == 'test'
            assert result == "test"

    @pytest.mark.parametrize(
        ("param"),
        [
            ({}),
            ({'module': 'test2'}),
        ]
    )
    @pytest.mark.parametrize(("param"), [({}), ({"module": "test2"}),])
    def test_output_full_value(self, param, string_logger):
        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
        tf.init('var_to_output')
        tf.apply('var_to_output')
        result = tf.output('test_output', **dict(param, full_value=True))
        regex = re.compile("terraform output (-module=test2 -json|-json -module=test2) test_output")
        tf = Terraform(working_dir=current_path, variables={"test_var": "test"})
        tf.init("var_to_output")
        tf.apply("var_to_output")
        result = tf.output("test_output", **dict(param, full_value=True))
        regex = re.compile(
            "terraform output (-module=test2 -json|-json -module=test2) test_output"
        )
        log_str = string_logger()
        if param:
            assert re.search(regex, log_str), log_str
        else:
            assert result['value'] == 'test'
            assert result["value"] == "test"

    @pytest.mark.parametrize(
        ("param"),
        [
            ({}),
            ({'module': 'test2'}),
        ]
    )
    @pytest.mark.parametrize(("param"), [({}), ({"module": "test2"}),])
    def test_output_all(self, param, string_logger):
        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
        tf.init('var_to_output')
        tf.apply('var_to_output')
        tf = Terraform(working_dir=current_path, variables={"test_var": "test"})
        tf.init("var_to_output")
        tf.apply("var_to_output")
        result = tf.output(**param)
        regex = re.compile("terraform output (-module=test2 -json|-json -module=test2)")
        log_str = string_logger()
        if param:
            assert re.search(regex, log_str), log_str
        else:
            assert result['test_output']['value'] == 'test'
            assert result["test_output"]["value"] == "test"

    def test_destroy(self):
        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
        tf.init('var_to_output')
        ret, out, err = tf.destroy('var_to_output')
        tf = Terraform(working_dir=current_path, variables={"test_var": "test"})
        tf.init("var_to_output")
        ret, out, err = tf.destroy("var_to_output")
        assert ret == 0
        assert 'Destroy complete! Resources: 0 destroyed.' in out
        assert "Destroy complete! Resources: 0 destroyed." in out

    @pytest.mark.parametrize(
        ("plan", "variables", "expected_ret"),
        [
            ('vars_require_input', {}, 1)
        ]
        ("plan", "variables", "expected_ret"), [("vars_require_input", {}, 1)]
    )
    def test_plan(self, plan, variables, expected_ret):
        tf = Terraform(working_dir=current_path, variables=variables)
@@ -355,103 +388,113 @@ class TestTerraform(object):
        assert ret == expected_ret

    def test_fmt(self, fmt_test_file):
        tf = Terraform(working_dir=current_path, variables={'test_var': 'test'})
        tf = Terraform(working_dir=current_path, variables={"test_var": "test"})
        ret, out, err = tf.fmt(diff=True)
        assert ret == 0

    def test_import(self, string_logger):
        tf = Terraform(working_dir=current_path)
        tf.import_cmd('aws_instance.foo', 'i-abc1234', no_color=IsFlagged)
        assert 'Command: terraform import -no-color aws_instance.foo i-abc1234' in string_logger()
        tf.import_cmd("aws_instance.foo", "i-abc1234", no_color=IsFlagged)
        assert (
            "Command: terraform import -no-color aws_instance.foo i-abc1234"
            in string_logger()
        )

    def test_create_workspace(self, workspace_setup_teardown):
        workspace_name = 'test'
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name, create=False) as tf:
            ret, out, err = tf.create_workspace('test')
            ret, out, err = tf.create_workspace("test")
        assert ret == 0
        assert err == ''
        assert err == ""

    def test_create_workspace_with_args(
            self, workspace_setup_teardown, string_logger
    ):
        workspace_name = 'test'
        state_file_path = os.path.join(current_path, 'test_tfstate_file2', 'terraform.tfstate')
    def test_create_workspace_with_args(self, workspace_setup_teardown, string_logger):
        workspace_name = "test"
        state_file_path = os.path.join(
            current_path, "test_tfstate_file2", "terraform.tfstate"
        )
        with workspace_setup_teardown(workspace_name, create=False) as tf:
            ret, out, err = tf.create_workspace('test', current_path, no_color=IsFlagged)
            ret, out, err = tf.create_workspace(
                "test", current_path, no_color=IsFlagged
            )

            assert ret == 0
            assert err == ''
            assert err == ""

            logs = string_logger()
            logs = logs.replace('\n', '')
            expected_log = 'Command: terraform workspace new -no-color test {}'.format(current_path)
            logs = logs.replace("\n", "")
            expected_log = "Command: terraform workspace new -no-color test {}".format(
                current_path
            )
            assert expected_log in logs

    def test_set_workspace(self, workspace_setup_teardown):
        workspace_name = 'test'
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name) as tf:
            ret, out, err = tf.set_workspace(workspace_name)
        assert ret == 0
        assert err == ''
        assert err == ""

    def test_set_workspace_with_args(
            self, workspace_setup_teardown, string_logger):
        workspace_name = 'test'
    def test_set_workspace_with_args(self, workspace_setup_teardown, string_logger):
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name) as tf:
            ret, out, err = tf.set_workspace(workspace_name, current_path, no_color=IsFlagged)
            ret, out, err = tf.set_workspace(
                workspace_name, current_path, no_color=IsFlagged
            )

            assert ret == 0
            assert err == ''
            assert err == ""

            logs = string_logger()
            logs = logs.replace('\n', '')
            expected_log = 'Command: terraform workspace select -no-color test {}'.format(current_path)
            logs = logs.replace("\n", "")
            expected_log = "Command: terraform workspace select -no-color test {}".format(
                current_path
            )
            assert expected_log in logs

    def test_show_workspace(self, workspace_setup_teardown):
        workspace_name = 'test'
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name) as tf:
            ret, out, err = tf.show_workspace()
        assert ret == 0
        assert err == ''
        assert err == ""

    def test_show_workspace_with_no_color(
        self, workspace_setup_teardown, string_logger
    ):
        workspace_name = 'test'
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name) as tf:
            ret, out, err = tf.show_workspace(no_color=IsFlagged)

            assert ret == 0
            assert err == ''
            assert err == ""

            logs = string_logger()
            logs = logs.replace('\n', '')
            expected_log = 'Command: terraform workspace show -no-color'
            logs = logs.replace("\n", "")
            expected_log = "Command: terraform workspace show -no-color"
            assert expected_log in logs

    def test_delete_workspace(self, workspace_setup_teardown):
        workspace_name = 'test'
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name, delete=False) as tf:
            tf.set_workspace('default')
            tf.set_workspace("default")
            ret, out, err = tf.delete_workspace(workspace_name)
        assert ret == 0
        assert err == ''
        assert err == ""

    def test_delete_workspace_with_args(
        self, workspace_setup_teardown, string_logger
    ):
        workspace_name = 'test'
    def test_delete_workspace_with_args(self, workspace_setup_teardown, string_logger):
        workspace_name = "test"
        with workspace_setup_teardown(workspace_name, delete=False) as tf:
            tf.set_workspace('default')
            tf.set_workspace("default")
            ret, out, err = tf.delete_workspace(
                workspace_name, current_path, force=IsFlagged,
            )

            assert ret == 0
            assert err == ''
            assert err == ""

            logs = string_logger()
            logs = logs.replace('\n', '')
            expected_log = 'Command: terraform workspace delete -force test {}'.format(current_path)
            logs = logs.replace("\n", "")
            expected_log = "Command: terraform workspace delete -force test {}".format(
                current_path
            )
            assert expected_log in logs