Skip to content

Commit d3d1e37

Browse files
authored
Create cache hash after command (#64)
* add tg cache handling
* add tg test cases
* generate hash before/after cmd
* add exclude_directories
* Revert "add tg test cases" — this reverts commit 458960d
* revert tg cases but keep heredocs
1 parent fd53b94 commit d3d1e37

2 files changed

Lines changed: 74 additions & 40 deletions

File tree

test/test_cache.py

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -70,10 +70,6 @@ def test_use_cache(tf):
7070
),
7171
], indirect=True)
7272
def test_no_use_cache(tf):
73-
"""
74-
Ensures cache is not used and runs the execute_command() for every call of
75-
the method
76-
"""
7773
expected_call_count = 2
7874
for method in cache_methods:
7975
with patch.object(tf, 'execute_command', wraps=tf.execute_command) as mock_execute_command:
@@ -84,6 +80,10 @@ def test_no_use_cache(tf):
8480

8581
@pytest.mark.parametrize("tf", [True], indirect=True)
8682
def test_use_cache_with_same_tf_var_file(tf, tmp_path):
83+
"""
84+
Ensures cache is used if the same tf_var_file argument is passed
85+
within subsequent method calls
86+
"""
8787
tf_var_file_methods = ["plan", "apply", "destroy"]
8888

8989
tf_vars_file = tmp_path / (str(uuid.uuid4()) + '.json')
@@ -99,6 +99,10 @@ def test_use_cache_with_same_tf_var_file(tf, tmp_path):
9999

100100
@pytest.mark.parametrize("tf", [True], indirect=True)
101101
def test_use_cache_with_new_tf_var_file(tf, tmp_path):
102+
"""
103+
Ensures cache is not used if a different tf_var_file argument is passed
104+
within subsequent method calls
105+
"""
102106
tf_var_file_methods = ["plan", "apply", "destroy"]
103107
expected_call_count = 2
104108

@@ -116,6 +120,10 @@ def test_use_cache_with_new_tf_var_file(tf, tmp_path):
116120

117121
@pytest.mark.parametrize("tf", [True], indirect=True)
118122
def test_use_cache_with_new_extra_files(tf, tmp_path):
123+
"""
124+
Ensures cache is not used if a different extra_files argument is passed
125+
within subsequent method calls
126+
"""
119127
expected_call_count = 2
120128
tf_vars_file = tmp_path / (str(uuid.uuid4()) + '.json')
121129
tf_vars_file.write_text(json.dumps({"foo": "old"}))
@@ -130,6 +138,10 @@ def test_use_cache_with_new_extra_files(tf, tmp_path):
130138

131139
@pytest.mark.parametrize("tf", [True], indirect=True)
132140
def test_use_cache_with_same_extra_files(tf, tmp_path):
141+
"""
142+
Ensures cache is used if the same extra_files argument is passed
143+
within subsequent method calls
144+
"""
133145
tf_vars_file = tmp_path / (str(uuid.uuid4()) + '.json')
134146
tf_vars_file.write_text(json.dumps({"foo": "old"}))
135147

@@ -142,6 +154,10 @@ def test_use_cache_with_same_extra_files(tf, tmp_path):
142154

143155
@pytest.mark.parametrize("tf", [True], indirect=True)
144156
def test_use_cache_with_new_env(tf):
157+
"""
158+
Ensures cache is not used if the env attribute is updated
159+
before subsequent method calls
160+
"""
145161
expected_call_count = 2
146162
for method in cache_methods:
147163
with patch.object(tf, 'execute_command', wraps=tf.execute_command) as mock_execute_command:
@@ -167,6 +183,10 @@ def dummy_tf_filepath(tf):
167183

168184
@pytest.mark.parametrize("tf", [True], indirect=True)
169185
def test_use_cache_with_new_tf_content(tf, dummy_tf_filepath):
186+
"""
187+
Ensures cache is not used if the tfdir directory is updated
188+
before subsequent method calls
189+
"""
170190
expected_call_count = 2
171191
for method in cache_methods:
172192
with patch.object(tf, 'execute_command', wraps=tf.execute_command) as mock_execute_command:

tftest.py

Lines changed: 50 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -375,21 +375,58 @@ def _abspath(self, path):
375375
"""Make relative path absolute from base dir."""
376376
return path if os.path.isabs(path) else os.path.join(self._basedir, path)
377377

378-
def _dirhash(self, directory, hash, ignore_hidden=False, excluded_extensions=[]):
378+
def _dirhash(self, directory, hash, ignore_hidden=False, exclude_directories=[], excluded_extensions=[]):
379+
"""Returns hash of directory's file contents"""
379380
assert Path(directory).is_dir()
380-
for path in sorted(Path(directory).iterdir(), key=lambda p: str(p).lower()):
381+
try:
382+
dir_iter = sorted(Path(directory).iterdir(),
383+
key=lambda p: str(p).lower())
384+
except FileNotFoundError:
385+
return hash
386+
for path in dir_iter:
381387
if path.is_file():
382-
if not ignore_hidden and path.name.startswith("."):
388+
if ignore_hidden and path.name.startswith("."):
383389
continue
384390
if path.suffix in excluded_extensions:
385391
continue
386392
with open(path, "rb") as f:
387393
for chunk in iter(lambda: f.read(4096), b""):
388394
hash.update(chunk)
389-
elif path.is_dir():
390-
hash = self._dirhash(path, hash, ignore_hidden=ignore_hidden)
395+
elif path.is_dir() and path.name not in exclude_directories:
396+
hash = self._dirhash(path, hash, ignore_hidden=ignore_hidden,
397+
exclude_directories=exclude_directories, excluded_extensions=excluded_extensions)
391398
return hash
392399

400+
def generate_cache_hash(self, method_kwargs):
401+
"""Returns a hash value using the instance's attributes and method keyword arguments"""
402+
params = {
403+
**{
404+
k: v for k, v in self.__dict__.items()
405+
# only uses instance attributes that are involved in the results of
406+
# the decorated method
407+
if k in ["binary", "_basedir", "tfdir", "_env"]
408+
},
409+
**method_kwargs,
410+
}
411+
412+
# creates hash of file contents
413+
for path_param in ["extra_files", "tf_var_file"]:
414+
if path_param in method_kwargs:
415+
if isinstance(method_kwargs[path_param], list):
416+
params[path_param] = [
417+
sha1(open(fp, 'rb').read()).hexdigest() for fp in method_kwargs[path_param]]
418+
else:
419+
params[path_param] = sha1(
420+
open(method_kwargs[path_param], 'rb').read()).hexdigest()
421+
422+
# creates hash of all file content within tfdir
423+
# excludes .terraform/, hidden files, tfstate files from being used for hash
424+
params["tfdir"] = self._dirhash(
425+
self.tfdir, sha1(), ignore_hidden=True, exclude_directories=[".terraform"], excluded_extensions=['.backup', '.tfstate']).hexdigest()
426+
427+
return sha1(json.dumps(params, sort_keys=True,
428+
default=str).encode("cp037")).hexdigest() + ".pickle"
429+
393430
def _cache(func):
394431

395432
def cache(self, **kwargs):
@@ -410,39 +447,9 @@ def cache(self, **kwargs):
410447

411448
cache_dir = self.cache_dir / \
412449
Path(self.tfdir.strip("/")) / Path(func.__name__)
413-
# creates cache dir if not exists
414450
cache_dir.mkdir(parents=True, exist_ok=True)
415451

416-
params = {
417-
**{
418-
k: v for k, v in self.__dict__.items()
419-
# only uses instance attributes that are involved in the results of
420-
# the decorated method
421-
if k in ["binary", "_basedir", "tfdir", "_env"]
422-
},
423-
**kwargs,
424-
}
425-
426-
# creates hash of file contents
427-
for path_param in ["extra_files", "tf_var_file"]:
428-
if path_param in kwargs:
429-
if isinstance(kwargs[path_param], list):
430-
params[path_param] = [
431-
sha1(open(fp, 'rb').read()).hexdigest() for fp in kwargs[path_param]]
432-
else:
433-
params[path_param] = sha1(
434-
open(kwargs[path_param], 'rb').read()).hexdigest()
435-
436-
# creates hash of all file content within tfdir
437-
# excludes hidden files from being used within hash (ignores .terraform/ or .terragrunt-cache/)
438-
# and excludes any local tfstate files
439-
440-
params["tfdir"] = self._dirhash(
441-
self.tfdir, sha1(), ignore_hidden=True, excluded_extensions=['.backup', '.tfstate']).hexdigest()
442-
443-
hash_filename = sha1(
444-
json.dumps(params, sort_keys=True,
445-
default=str).encode("cp037")).hexdigest() + ".pickle"
452+
hash_filename = self.generate_cache_hash(kwargs)
446453
cache_key = cache_dir / hash_filename
447454
_LOGGER.debug("Cache key: %s", cache_key)
448455

@@ -458,6 +465,13 @@ def cache(self, **kwargs):
458465
out = func(self, **kwargs)
459466

460467
if out:
468+
# the hash value will now include any changes
469+
# to the tfdir directory including any terragrunt
470+
# generated files
471+
hash_filename = self.generate_cache_hash(kwargs)
472+
cache_key = cache_dir / hash_filename
473+
_LOGGER.debug("Cache key: %s", cache_key)
474+
461475
_LOGGER.info("Writing command to cache")
462476
try:
463477
f = cache_key.open("wb")

0 commit comments

Comments (0)