Skip to content

Commit 6cd2a0f

Browse files
authored
feat: Added support for variety of options for source_path, closes claranet#12 (claranet#25)
1 parent 038f877 commit 6cd2a0f

File tree

2 files changed

+86
-48
lines changed

2 files changed

+86
-48
lines changed

Diff for: README.md

+3-3
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ Not supported, yet:
1717

1818
This Terraform module is part of the [serverless.tf framework](https://github.com/antonbabenko/serverless.tf), which aims to simplify all operations when working with serverless in Terraform:
1919

20-
1. Build and install dependencies - [read more](#build).
20+
1. Build and install dependencies - [read more](#build). Requires Python 3.6 or newer.
2121
2. Create, store, and use deployment packages - [read more](#package).
2222
3. Create, update, and publish AWS Lambda Function and Lambda Layer - [see usage](#usage).
2323
4. Create static and dynamic aliases for AWS Lambda Function - [see usage](#usage), see [modules/alias](https://github.com/terraform-aws-modules/terraform-aws-lambda/tree/master/modules/alias).
@@ -306,7 +306,7 @@ module "lambda" {
306306

307307
This is one of the most complicated parts handled by the module, and normally you don't have to know the internals.
308308

309-
`package.py` is a Python script which does it. Make sure Python 3.7 or newer is installed. The main functions of the script are to generate a filename of the zip-archive based on the content of the files, verify whether the zip-archive has already been created, and create the zip-archive only when it is necessary (during `apply`, not `plan`).
309+
`package.py` is a Python script which does it. Make sure Python 3.6 or newer is installed. The main functions of the script are to generate a filename of the zip-archive based on the content of the files, verify whether the zip-archive has already been created, and create the zip-archive only when it is necessary (during `apply`, not `plan`).
310310

311311
The hash of a zip-archive created from the same file content is always identical, which prevents unnecessary force-updates of the Lambda resources unless the content changes. If you need to have different filenames for the same content you can specify an extra string argument `hash_extra`.
312312

@@ -315,7 +315,7 @@ When calling this module multiple times in one execution to create packages with
315315

316316
## <a name="build"></a> Debug
317317

318-
Building and packaging have historically been hard to debug (especially with Terraform), so we made an effort to make it easier for users to see debug info. There are 3 different debug levels: `DEBUG` - to see only what is happening during the planning phase, `DEBUG2` - to see all logging values, `DEBUG3` - to see all logging values and env variables (be careful sharing your env variables as they may contain secrets!).
318+
Building and packaging have historically been hard to debug (especially with Terraform), so we made an effort to make it easier for users to see debug info. There are 3 different debug levels: `DEBUG` - to see only what is happening during the planning phase and how the zip file content is filtered when patterns are applied, `DEBUG2` - to see more logging output, `DEBUG3` - to see all logging values, `DUMP_ENV` - to see all logging values and env variables (be careful sharing your env variables as they may contain secrets!).
319319

320320
A user can specify the debug level like this:
321321

Diff for: package.py

+83-45
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@
22

33
import sys
44

5-
if sys.version_info < (3, 7):
6-
raise RuntimeError("A python version 3.7 or newer is required")
5+
if sys.version_info < (3, 6):
6+
raise RuntimeError("A python version 3.6 or newer is required")
77

88
import os
99
import re
@@ -26,12 +26,15 @@
2626
import logging
2727

2828
PY38 = sys.version_info >= (3, 8)
29+
PY37 = sys.version_info >= (3, 7)
30+
PY36 = sys.version_info >= (3, 6)
2931

3032
################################################################################
3133
# Logging
3234

3335
DEBUG2 = 9
3436
DEBUG3 = 8
37+
DUMP_ENV = 1
3538

3639
log_handler = None
3740
log = logging.getLogger()
@@ -43,6 +46,7 @@ def configure_logging(use_tf_stderr=False):
4346

4447
logging.addLevelName(DEBUG2, 'DEBUG2')
4548
logging.addLevelName(DEBUG3, 'DEBUG3')
49+
logging.addLevelName(DUMP_ENV, 'DUMP_ENV')
4650

4751
class LogFormatter(logging.Formatter):
4852
default_format = '%(message)s'
@@ -139,28 +143,29 @@ def list_files(top_path, log=None):
139143
return results
140144

141145

142-
def dataclass(name, **fields):
143-
typ = type(name, (object,), {
144-
'__slots__': fields.keys(),
145-
'__getattr__': lambda *_: None,
146+
def dataclass(name):
147+
typ = type(name, (dict,), {
148+
'__getattr__': lambda self, x: self.get(x),
149+
'__init__': lambda self, **k: self.update(k),
146150
})
147-
for k, v in fields.items():
148-
setattr(typ, k, v)
149151
return typ
150152

151153

152154
def datatree(name, **fields):
153-
def decode_json(v):
155+
def decode_json(k, v):
154156
if v and isinstance(v, str) and v[0] in '"[{':
155157
try:
156-
return json.loads(v)
158+
o = json.loads(v)
159+
if isinstance(o, dict):
160+
return dataclass(k)(**o)
161+
return o
157162
except json.JSONDecodeError:
158163
pass
159164
return v
160165

161-
return dataclass(name, **dict(((
162-
k, datatree(k, **v) if isinstance(v, dict) else decode_json(v))
163-
for k, v in fields.items())))()
166+
return dataclass(name)(**dict(((
167+
k, datatree(k, **v) if isinstance(v, dict) else decode_json(k, v))
168+
for k, v in fields.items())))
164169

165170

166171
def timestamp_now_ns():
@@ -291,7 +296,11 @@ def __enter__(self):
291296
return self.open()
292297

293298
def __exit__(self, exc_type, exc_val, exc_tb):
294-
self.close(failed=exc_type is not None)
299+
if exc_type is not None:
300+
self._log.exception("Error during zip archive creation")
301+
self.close(failed=True)
302+
raise SystemExit(1)
303+
self.close()
295304

296305
def _ensure_open(self):
297306
if self._zip is not None:
@@ -380,10 +389,11 @@ def _write_zinfo(self, zinfo, filename,
380389
else:
381390
zinfo.compress_type = self._compress_type
382391

383-
if compresslevel is not None:
384-
zinfo._compresslevel = compresslevel
385-
else:
386-
zinfo._compresslevel = self._compresslevel
392+
if PY37:
393+
if compresslevel is not None:
394+
zinfo._compresslevel = compresslevel
395+
else:
396+
zinfo._compresslevel = self._compresslevel
387397

388398
if zinfo.is_dir():
389399
with zip._lock:
@@ -438,9 +448,9 @@ def _zinfo_from_file(filename, arcname=None, *, strict_timestamps=True):
438448
isdir = stat.S_ISDIR(st.st_mode)
439449
mtime = time.localtime(st.st_mtime)
440450
date_time = mtime[0:6]
441-
if not strict_timestamps and date_time[0] < 1980:
451+
if strict_timestamps and date_time[0] < 1980:
442452
date_time = (1980, 1, 1, 0, 0, 0)
443-
elif not strict_timestamps and date_time[0] > 2107:
453+
elif strict_timestamps and date_time[0] > 2107:
444454
date_time = (2107, 12, 31, 23, 59, 59)
445455
# Create ZipInfo instance to store file information
446456
if arcname is None:
@@ -631,11 +641,19 @@ def pip_requirements_step(path, prefix=None, required=False):
631641
hash(requirements)
632642

633643
def commands_step(path, commands):
634-
path = os.path.normpath(path)
644+
if path:
645+
path = os.path.normpath(path)
635646
batch = []
636647
for c in commands:
637648
if isinstance(c, str):
638649
if c.startswith(':zip'):
650+
if path:
651+
hash(path)
652+
else:
653+
# If path isn't defined for a block with
654+
# commands it will be set to Terraform's
655+
# current working directory
656+
path = query.paths.cwd
639657
if batch:
640658
step('sh', path, '\n'.join(batch))
641659
batch.clear()
@@ -644,15 +662,18 @@ def commands_step(path, commands):
644662
_, _path, prefix = c
645663
prefix = prefix.strip()
646664
_path = os.path.normpath(os.path.join(path, _path))
647-
step('zip', _path, prefix)
665+
step('zip:embedded', _path, prefix)
666+
elif len(c) == 2:
667+
prefix = None
668+
_, _path = c
669+
step('zip:embedded', _path, prefix)
648670
elif len(c) == 1:
649671
prefix = None
650-
step('zip', path, prefix)
672+
step('zip:embedded', path, prefix)
651673
else:
652674
raise ValueError(
653-
':zip command can have zero '
654-
'or 2 arguments: {}'.format(c))
655-
hash(path)
675+
":zip invalid call signature, use: "
676+
"':zip [path [prefix_in_zip]]'")
656677
else:
657678
batch.append(c)
658679

@@ -706,30 +727,30 @@ def execute(self, build_plan, zip_stream, query):
706727

707728
for action in build_plan:
708729
cmd = action[0]
709-
if cmd == 'zip':
730+
if cmd.startswith('zip'):
731+
ts = 0 if cmd == 'zip:embedded' else None
710732
source_path, prefix = action[1:]
711733
if sh_work_dir:
712734
if source_path != sh_work_dir:
713735
source_path = sh_work_dir
714-
if pf:
715-
for path in pf.filter(source_path, prefix):
716-
if os.path.isdir(source_path):
717-
arcname = os.path.relpath(path, source_path)
718-
else:
719-
arcname = os.path.basename(path)
720-
zs.write_file(path, prefix, arcname)
736+
if pf:
737+
self._zip_write_with_filter(zs, pf, source_path, prefix,
738+
timestamp=ts)
721739
else:
722740
if os.path.isdir(source_path):
723-
zs.write_dirs(source_path, prefix=prefix)
741+
zs.write_dirs(source_path, prefix=prefix, timestamp=ts)
724742
else:
725-
zs.write_file(source_path, prefix=prefix)
743+
zs.write_file(source_path, prefix=prefix, timestamp=ts)
726744
elif cmd == 'pip':
727745
runtime, pip_requirements, prefix = action[1:]
728-
with install_pip_requirements(query, zs,
729-
pip_requirements) as rd:
746+
with install_pip_requirements(query, pip_requirements) as rd:
730747
if rd:
731-
# XXX: timestamp=0 - what actually do with it?
732-
zs.write_dirs(rd, prefix=prefix, timestamp=0)
748+
if pf:
749+
self._zip_write_with_filter(zs, pf, rd, prefix,
750+
timestamp=0)
751+
else:
752+
# XXX: timestamp=0 - what actually do with it?
753+
zs.write_dirs(rd, prefix=prefix, timestamp=0)
733754
elif cmd == 'sh':
734755
r, w = os.pipe()
735756
side_ch = os.fdopen(r)
@@ -750,9 +771,22 @@ def execute(self, build_plan, zip_stream, query):
750771
elif cmd == 'clear:filter':
751772
pf = None
752773

774+
@staticmethod
775+
def _zip_write_with_filter(zip_stream, path_filter, source_path, prefix,
776+
timestamp=None):
777+
for path in path_filter.filter(source_path, prefix):
778+
if os.path.isdir(source_path):
779+
arcname = os.path.relpath(path, source_path)
780+
else:
781+
arcname = os.path.basename(path)
782+
zip_stream.write_file(path, prefix, arcname, timestamp=timestamp)
783+
753784

754785
@contextmanager
755-
def install_pip_requirements(query, zip_stream, requirements_file):
786+
def install_pip_requirements(query, requirements_file):
787+
# TODO:
788+
# 1. Emit files instead of temp_dir
789+
756790
if not os.path.exists(requirements_file):
757791
yield
758792
return
@@ -892,10 +926,15 @@ def prepare_command(args):
892926
# Load the query.
893927
query_data = json.load(sys.stdin)
894928

895-
if log.isEnabledFor(DEBUG3):
929+
if log.isEnabledFor(DUMP_ENV):
896930
log.debug('ENV: %s', json.dumps(dict(os.environ), indent=2))
897931
if log.isEnabledFor(DEBUG2):
898-
log.debug('QUERY: %s', json.dumps(query_data, indent=2))
932+
if log.isEnabledFor(DEBUG3):
933+
log.debug('QUERY: %s', json.dumps(query_data, indent=2))
934+
else:
935+
log_excludes = ('source_path', 'hash_extra_paths', 'paths')
936+
qd = {k: v for k, v in query_data.items() if k not in log_excludes}
937+
log.debug('QUERY (excerpt): %s', json.dumps(qd, indent=2))
899938

900939
query = datatree('prepare_query', **query_data)
901940

@@ -944,7 +983,6 @@ def prepare_command(args):
944983
build_data = {
945984
'filename': filename,
946985
'runtime': runtime,
947-
'source_path': source_path,
948986
'artifacts_dir': artifacts_dir,
949987
'build_plan': build_plan,
950988
}
@@ -1050,7 +1088,7 @@ def zip_cmd(args):
10501088
subprocess.call([zipinfo, args.zipfile])
10511089
log.debug('-' * 80)
10521090
log.debug('Source code hash: %s',
1053-
source_code_hash(open(args.zipfile, 'rb').read()))
1091+
source_code_hash(open(args.zipfile, 'rb').read()))
10541092

10551093
p = hidden_parser('zip', help='Zip folder with provided files timestamp')
10561094
p.set_defaults(command=zip_cmd)

0 commit comments

Comments
 (0)