@@ -34,8 +34,8 @@
 DEBUG3 = 8
 
 log_handler = None
-logger = logging.getLogger()
-cmd_logger = logging.getLogger('cmd')
+log = logging.getLogger()
+cmd_log = logging.getLogger('cmd')
 
 
 def configure_logging(use_tf_stderr=False):
@@ -71,8 +71,8 @@ def formatMessage(self, record):
     log_handler = logging.StreamHandler(stream=log_stream)
     log_handler.setFormatter(LogFormatter())
 
-    logger.addHandler(log_handler)
-    logger.setLevel(logging.INFO)
+    log.addHandler(log_handler)
+    log.setLevel(logging.INFO)
 
 
 ################################################################################
@@ -88,7 +88,7 @@ def shlex_join(split_command):
 
 def abort(message):
     """Exits with an error message."""
-    logger.error(message)
+    log.error(message)
     sys.exit(1)
 
 
@@ -97,7 +97,7 @@ def cd(path, silent=False):
     """Changes the working directory."""
    cwd = os.getcwd()
     if not silent:
-        cmd_logger.info('cd %s', shlex.quote(path))
+        cmd_log.info('cd %s', shlex.quote(path))
     try:
         os.chdir(path)
         yield
@@ -110,20 +110,20 @@ def tempdir():
     """Creates a temporary directory and then deletes it afterwards."""
     prefix = 'terraform-aws-lambda-'
     path = tempfile.mkdtemp(prefix=prefix)
-    cmd_logger.info('mktemp -d %sXXXXXXXX # %s', prefix, shlex.quote(path))
+    cmd_log.info('mktemp -d %sXXXXXXXX # %s', prefix, shlex.quote(path))
     try:
         yield path
     finally:
         shutil.rmtree(path)
 
 
-def list_files(top_path, logger=None):
+def list_files(top_path, log=None):
     """
     Returns a sorted list of all files in a directory.
     """
 
-    if logger:
-        logger = logger.getChild('ls')
+    if log:
+        log = log.getChild('ls')
 
     results = []
 
@@ -132,8 +132,8 @@ def list_files(top_path, logger=None):
             file_path = os.path.join(root, file_name)
             relative_path = os.path.relpath(file_path, top_path)
             results.append(relative_path)
-            if logger:
-                logger.debug(relative_path)
+            if log:
+                log.debug(relative_path)
 
     results.sort()
     return results
@@ -205,30 +205,30 @@ def emit_dir_content(base_dir):
 
 
 def generate_content_hash(source_paths,
-                          hash_func=hashlib.sha256, logger=None):
+                          hash_func=hashlib.sha256, log=None):
     """
     Generate a content hash of the source paths.
     """
 
-    if logger:
-        logger = logger.getChild('hash')
+    if log:
+        log = log.getChild('hash')
 
     hash_obj = hash_func()
 
     for source_path in source_paths:
         if os.path.isdir(source_path):
             source_dir = source_path
-            _logger = logger if logger.isEnabledFor(DEBUG3) else None
-            for source_file in list_files(source_dir, logger=_logger):
+            _log = log if log.isEnabledFor(DEBUG3) else None
+            for source_file in list_files(source_dir, log=_log):
                 update_hash(hash_obj, source_dir, source_file)
-                if logger:
-                    logger.debug(os.path.join(source_dir, source_file))
+                if log:
+                    log.debug(os.path.join(source_dir, source_file))
         else:
             source_dir = os.path.dirname(source_path)
             source_file = os.path.relpath(source_path, source_dir)
             update_hash(hash_obj, source_dir, source_file)
-            if logger:
-                logger.debug(source_path)
+            if log:
+                log.debug(source_path)
 
     return hash_obj
 
@@ -268,14 +268,14 @@ def __init__(self, zip_filename,
         self._compresslevel = compresslevel
         self._zip = None
 
-        self._logger = logging.getLogger('zip')
+        self._log = logging.getLogger('zip')
 
     def open(self):
         if self._tmp_filename:
             raise zipfile.BadZipFile("ZipStream object can't be reused")
         self._ensure_base_path(self.filename)
         self._tmp_filename = '{}.tmp'.format(self.filename)
-        self._logger.info("creating '%s' archive", self.filename)
+        self._log.info("creating '%s' archive", self.filename)
         self._zip = zipfile.ZipFile(self._tmp_filename, "w",
                                     self._compress_type)
         return self
@@ -304,7 +304,7 @@ def _ensure_base_path(self, zip_filename):
         archive_dir = os.path.dirname(zip_filename)
 
         if archive_dir and not os.path.exists(archive_dir):
-            self._logger.info("creating %s", archive_dir)
+            self._log.info("creating %s", archive_dir)
             os.makedirs(archive_dir)
 
     def write_dirs(self, *base_dirs, prefix=None, timestamp=None):
@@ -313,7 +313,7 @@ def write_dirs(self, *base_dirs, prefix=None, timestamp=None):
         """
         self._ensure_open()
         for base_dir in base_dirs:
-            self._logger.info("adding content of directory '%s'", base_dir)
+            self._log.info("adding content of directory '%s'", base_dir)
             for path in emit_dir_content(base_dir):
                 arcname = os.path.relpath(path, base_dir)
                 self._write_file(path, prefix, arcname, timestamp)
@@ -338,7 +338,7 @@ def _write_file(self, file_path, prefix=None, name=None, timestamp=None):
         arcname = name if name else os.path.basename(file_path)
         if prefix:
             arcname = os.path.join(prefix, arcname)
-        self._logger.info("adding '%s'", arcname)
+        self._log.info("adding '%s'", arcname)
         zinfo = self._make_zinfo_from_file(file_path, arcname)
         if timestamp is None:
             timestamp = self.timestamp
@@ -501,12 +501,12 @@ class ZipContentFilter:
     def __init__(self):
         self._rules = None
         self._excludes = set()
-        self._logger = logging.getLogger('zip')
+        self._log = logging.getLogger('zip')
 
     def compile(self, patterns):
         rules = []
         for p in patterns_list(patterns):
-            self._logger.debug("pattern '%s'", p)
+            self._log.debug("pattern '%s'", p)
             if p.startswith('!'):
                 r = re.compile(p[1:])
                 rules.append((operator.not_, r))
@@ -559,21 +559,21 @@ def emit_file(fpath, opath):
         else:
             for root, dirs, files in os.walk(path):
                 o, d = norm_path(path, root)
-                logger.info('od: %s %s', o, d)
+                log.info('od: %s %s', o, d)
                 if root != path:
                     yield from emit_dir(d, o)
                 for name in files:
                     o, f = norm_path(path, root, name)
-                    logger.info('of: %s %s', o, f)
+                    log.info('of: %s %s', o, f)
                     yield from emit_file(f, o)
 
 
 class BuildPlanManager:
     """"""
 
-    def __init__(self, logger=None):
+    def __init__(self, log=None):
         self._source_paths = None
-        self._logger = logger or logging.root
+        self._log = log or logging.root
 
     def hash(self, extra_paths):
         if not self._source_paths:
@@ -584,9 +584,9 @@ def hash(self, extra_paths):
         # Generate a hash based on file names and content. Also use the
         # runtime value, build command, and content of the build paths
         # because they can have an effect on the resulting archive.
-        self._logger.debug("Computing content hash on files...")
+        self._log.debug("Computing content hash on files...")
         content_hash = generate_content_hash(content_hash_paths,
-                                             logger=self._logger)
+                                             log=self._log)
         return content_hash
 
     def plan(self, source_path, query):
@@ -723,7 +723,7 @@ def execute(self, build_plan, zip_stream, query):
                 os.close(w)
                 sh_work_dir = side_ch.read().strip()
                 p.wait()
-                logger.info('WD: %s', sh_work_dir)
+                log.info('WD: %s', sh_work_dir)
                 side_ch.close()
             elif cmd == 'set:filter':
                 patterns = action[1]
@@ -745,7 +745,7 @@ def install_pip_requirements(query, zip_stream, requirements_file):
 
     working_dir = os.getcwd()
 
-    logger.info('Installing python requirements: %s', requirements_file)
+    log.info('Installing python requirements: %s', requirements_file)
     with tempdir() as temp_dir:
         requirements_filename = os.path.basename(requirements_file)
         target_file = os.path.join(temp_dir, requirements_filename)
@@ -779,7 +779,7 @@ def install_pip_requirements(query, zip_stream, requirements_file):
                 pip_cache_dir=pip_cache_dir
             ))
         else:
-            cmd_logger.info(shlex_join(pip_command))
+            cmd_log.info(shlex_join(pip_command))
             log_handler and log_handler.flush()
             check_call(pip_command)
 
@@ -796,7 +796,7 @@ def docker_build_command(build_root, docker_file=None, tag=None):
         docker_cmd.extend(['--tag', tag])
     docker_cmd.append(build_root)
 
-    cmd_logger.info(shlex_join(docker_cmd))
+    cmd_log.info(shlex_join(docker_cmd))
     log_handler and log_handler.flush()
     return docker_cmd
 
@@ -853,7 +853,7 @@ def docker_run_command(build_root, command, runtime,
         docker_cmd.extend([shell, '-c'])
     docker_cmd.extend(command)
 
-    cmd_logger.info(shlex_join(docker_cmd))
+    cmd_log.info(shlex_join(docker_cmd))
     log_handler and log_handler.flush()
     return docker_cmd
 
@@ -869,15 +869,15 @@ def prepare_command(args):
     Outputs a filename and a command to run if the archive needs to be built.
     """
 
-    logger = logging.getLogger('prepare')
+    log = logging.getLogger('prepare')
 
     # Load the query.
     query_data = json.load(sys.stdin)
 
-    if logger.isEnabledFor(DEBUG3):
-        logger.debug('ENV: %s', json.dumps(dict(os.environ), indent=2))
-    if logger.isEnabledFor(DEBUG2):
-        logger.debug('QUERY: %s', json.dumps(query_data, indent=2))
+    if log.isEnabledFor(DEBUG3):
+        log.debug('ENV: %s', json.dumps(dict(os.environ), indent=2))
+    if log.isEnabledFor(DEBUG2):
+        log.debug('QUERY: %s', json.dumps(query_data, indent=2))
 
     query = datatree('prepare_query', **query_data)
 
@@ -891,11 +891,11 @@ def prepare_command(args):
     recreate_missing_package = yesno_bool(args.recreate_missing_package)
     docker = query.docker
 
-    bpm = BuildPlanManager(logger=logger)
+    bpm = BuildPlanManager(log=log)
     build_plan = bpm.plan(source_path, query)
 
-    if logger.isEnabledFor(DEBUG2):
-        logger.debug('BUILD_PLAN: %s', json.dumps(build_plan, indent=2))
+    if log.isEnabledFor(DEBUG2):
+        log.debug('BUILD_PLAN: %s', json.dumps(build_plan, indent=2))
 
     # Expand a Terraform path.<cwd|root|module> references
     hash_extra_paths = [p.format(path=tf_paths) for p in hash_extra_paths]
@@ -958,12 +958,12 @@ def build_command(args):
     Installs dependencies with pip automatically.
     """
 
-    logger = logging.getLogger('build')
+    log = logging.getLogger('build')
 
-    if logger.isEnabledFor(DEBUG3):
-        logger.debug('ENV: %s', json.dumps(dict(os.environ), indent=2))
-    if logger.isEnabledFor(DEBUG2):
-        logger.debug('CMD: python3 %s', shlex_join(sys.argv))
+    if log.isEnabledFor(DEBUG3):
+        log.debug('ENV: %s', json.dumps(dict(os.environ), indent=2))
+    if log.isEnabledFor(DEBUG2):
+        log.debug('CMD: python3 %s', shlex_join(sys.argv))
 
     with open(args.build_plan_file) as f:
         query_data = json.load(f)
@@ -979,20 +979,20 @@ def build_command(args):
         timestamp = int(_timestamp)
 
     if os.path.exists(filename) and not args.force:
-        logger.info('Reused: %s', shlex.quote(filename))
+        log.info('Reused: %s', shlex.quote(filename))
         return
 
     # Zip up the build plan and write it to the target filename.
     # This will be used by the Lambda function as the source code package.
     with ZipWriteStream(filename) as zs:
-        bpm = BuildPlanManager(logger=logger)
+        bpm = BuildPlanManager(log=log)
         bpm.execute(build_plan, zs, query)
 
     os.utime(filename, ns=(timestamp, timestamp))
-    logger.info('Created: %s', shlex.quote(filename))
-    if logger.isEnabledFor(logging.DEBUG):
+    log.info('Created: %s', shlex.quote(filename))
+    if log.isEnabledFor(logging.DEBUG):
         with open(filename, 'rb') as f:
-            logger.info('Base64sha256: %s', source_code_hash(f.read()))
+            log.info('Base64sha256: %s', source_code_hash(f.read()))
 
 
 
 def add_hidden_commands(sub_parsers):
@@ -1022,16 +1022,16 @@ def hidden_parser(name, **kwargs):
 
     def zip_cmd(args):
         if args.verbose:
-            logger.setLevel(logging.DEBUG)
+            log.setLevel(logging.DEBUG)
         with ZipWriteStream(args.zipfile) as zs:
             zs.write_dirs(*args.dir, timestamp=args.timestamp)
-        if logger.isEnabledFor(logging.DEBUG):
+        if log.isEnabledFor(logging.DEBUG):
             zipinfo = shutil.which('zipinfo')
             if zipinfo:
-                logger.debug('-' * 80)
+                log.debug('-' * 80)
                 subprocess.call([zipinfo, args.zipfile])
-                logger.debug('-' * 80)
-            logger.debug('Source code hash: %s',
+                log.debug('-' * 80)
+            log.debug('Source code hash: %s',
                       source_code_hash(open(args.zipfile, 'rb').read()))
 
     p = hidden_parser('zip', help='Zip folder with provided files timestamp')