Skip to content

Commit 778ceff

Browse files
committed
package.py - Added BuildPlanManager initial implementation
1 parent b547d89 commit 778ceff

File tree

2 files changed

+117
-64
lines changed

2 files changed

+117
-64
lines changed

package.py

+116-63
Original file line numberDiff line numberDiff line change
@@ -485,58 +485,123 @@ def str_int_to_timestamp(s):
485485
################################################################################
486486
# Building
487487

488+
class BuildPlanManager:
    """Turns a ``source_path`` query into a deterministic build plan.

    ``plan()`` normalizes *source_path* (a plain path string or a list of
    claims) into a list of ``(command, path)`` actions and records the
    validated paths; ``hash()`` then derives a content hash from those
    paths plus any extras; ``execute()`` replays a plan into a zip stream.
    """

    def __init__(self):
        # Populated by plan(); hash() refuses to run before plan() was
        # called so a content hash can never be computed on nothing.
        self._source_paths = None

    def hash(self, extra_paths):
        """Return a content hash object over the planned source paths
        plus *extra_paths*.

        :raises ValueError: if plan() has not been called yet.
        """
        if not self._source_paths:
            raise ValueError('BuildPlanManager.plan() should be called first')

        content_hash_paths = self._source_paths + extra_paths

        # Generate a hash based on file names and content. Also use the
        # runtime value, build command, and content of the build paths
        # because they can have an effect on the resulting archive.
        logger.debug("Computing content hash on files...")
        content_hash = generate_content_hash(content_hash_paths, logger=logger)
        return content_hash

    def plan(self, source_path, query):
        """Validate *source_path* and return a list of build actions.

        *source_path* may be a single path string or a list of claims.
        A string claim must exist on disk and is zipped verbatim; a dict
        claim is reserved for future structured builds and is currently
        ignored; anything else is rejected.
        """
        # Normalize a single claim to a one-element list of claims.
        claims = source_path if isinstance(source_path, list) else [source_path]

        source_paths = []
        build_plan = []
        for claim in claims:
            if isinstance(claim, str):
                # A plain path claim must point at an existing file or
                # directory before it can be zipped.
                if not os.path.exists(claim):
                    abort('source_path must be set.')
                build_plan.append(('zip', claim))
                source_paths.append(claim)
            elif isinstance(claim, dict):
                # Structured claims are not implemented yet; skip silently
                # so callers can already pass them without breaking.
                pass
            else:
                raise ValueError(
                    'Unsupported source_path item: {}'.format(claim))

        self._source_paths = source_paths
        return build_plan

    def execute(self, build_plan, zip_stream, query):
        """Write every action of *build_plan* into *zip_stream*."""
        runtime = query.runtime

        zs = zip_stream
        for action in build_plan:
            cmd, source_path = action
            if cmd == 'zip':
                if os.path.isdir(source_path):
                    if runtime.startswith('python'):
                        # Install requirements.txt (when present) and add
                        # the resulting dependency dir to the archive
                        # before the source tree itself.
                        with install_pip_requirements(
                            query, zs,
                            os.path.join(source_path, 'requirements.txt')
                        ) as rd:
                            rd and zs.write_dirs(
                                rd, timestamp=0)  # XXX: temp ts=0
                    zs.write_dirs(source_path)
                else:
                    zs.write_file(source_path)
548+
549+
488550
@contextmanager
def install_pip_requirements(query, zip_stream, requirements_file):
    """Install *requirements_file* into a temp dir and yield that dir.

    Yields None without installing anything when the requirements file
    does not exist, so callers can unconditionally wrap the install.
    The yielded directory lives only for the duration of the ``with``
    block (it is a ``tempdir()``).

    NOTE(review): the ``zip_stream`` parameter is never used in this
    body — presumably kept for interface symmetry; confirm with callers.
    """
    # Guard clause: nothing to install.
    if not os.path.exists(requirements_file):
        yield
        return

    runtime = query.runtime
    artifacts_dir = query.artifacts_dir
    docker = query.docker

    # Remembered before any cd() so cache paths below stay relative to
    # the directory the tool was invoked from.
    working_dir = os.getcwd()

    logger.info('Installing python requirements: %s', requirements_file)
    with tempdir() as temp_dir:
        # Copy the requirements file next to the install target so pip
        # can be run with a bare filename from inside temp_dir.
        requirements_filename = os.path.basename(requirements_file)
        target_file = os.path.join(temp_dir, requirements_filename)
        shutil.copyfile(requirements_file, target_file)

        # Install dependencies into the temporary directory.
        with cd(temp_dir):
            if runtime.startswith('python3'):
                pip_command = ['pip3']
            else:
                pip_command = ['pip2']
            # --prefix= (empty) plus --target=. installs flat into the
            # current (temp) directory with no bin/lib layout.
            pip_command.extend([
                'install', '--no-compile',
                '--prefix=', '--target=.',
                '--requirement={}'.format(requirements_filename),
            ])
            if docker:
                pip_cache_dir = docker.docker_pip_cache
                if pip_cache_dir:
                    # A string cache path is resolved relative to the
                    # original working directory.
                    if isinstance(pip_cache_dir, str):
                        pip_cache_dir = os.path.abspath(
                            os.path.join(working_dir, pip_cache_dir))
                else:
                    # Fall back to a cache under the artifacts dir.
                    pip_cache_dir = os.path.abspath(os.path.join(
                        working_dir, artifacts_dir, 'cache/pip'))

                # pip inside the container runs as root; chown the
                # results back to the invoking user afterwards.
                chown_mask = '{}:{}'.format(os.getuid(), os.getgid())
                shell_command = [shlex_join(pip_command), '&&',
                                 shlex_join(['chown', '-R',
                                             chown_mask, '.'])]
                shell_command = [' '.join(shell_command)]
                check_call(docker_run_command(
                    '.', shell_command, runtime, shell=True,
                    pip_cache_dir=pip_cache_dir
                ))
            else:
                cmd_logger.info(shlex_join(pip_command))
                # Flush buffered log output before pip writes directly
                # to the terminal.
                log_handler and log_handler.flush()
                check_call(pip_command)

        # Drop the copied requirements file so it is not packaged with
        # the installed dependencies.
        os.remove(target_file)
        yield temp_dir
540605

541606

542607
def docker_build_command(build_root, docker_file=None, tag=None):
@@ -643,19 +708,13 @@ def prepare_command(args):
643708
recreate_missing_package = yesno_bool(args.recreate_missing_package)
644709
docker = query.docker
645710

646-
# Validate the query.
647-
if not os.path.exists(source_path):
648-
abort('source_path must be set.')
711+
bpm = BuildPlanManager()
712+
build_plan = bpm.plan(source_path, query)
649713

650714
# Expand a Terraform path.<cwd|root|module> references
651715
hash_extra_paths = [p.format(path=tf_paths) for p in hash_extra_paths]
652-
content_hash_paths = [source_path] + hash_extra_paths
653716

654-
# Generate a hash based on file names and content. Also use the
655-
# runtime value, build command, and content of the build paths
656-
# because they can have an effect on the resulting archive.
657-
logger.debug("Computing content hash on files...")
658-
content_hash = generate_content_hash(content_hash_paths, logger=logger)
717+
content_hash = bpm.hash(hash_extra_paths)
659718
content_hash.update(runtime.encode())
660719
content_hash.update(hash_extra.encode())
661720
content_hash = content_hash.hexdigest()
@@ -683,6 +742,7 @@ def prepare_command(args):
683742
'runtime': runtime,
684743
'source_path': source_path,
685744
'artifacts_dir': artifacts_dir,
745+
'build_plan': build_plan,
686746
}
687747
if docker:
688748
build_data['docker'] = docker
@@ -725,7 +785,7 @@ def build_command(args):
725785

726786
runtime = query.runtime
727787
filename = query.filename
728-
source_path = query.source_path
788+
build_plan = query.build_plan
729789
_timestamp = args.zip_file_timestamp
730790

731791
timestamp = 0
@@ -736,18 +796,11 @@ def build_command(args):
736796
logger.info('Reused: %s', shlex.quote(filename))
737797
return
738798

739-
# Zip up the temporary directory and write it to the target filename.
799+
# Zip up the build plan and write it to the target filename.
740800
# This will be used by the Lambda function as the source code package.
741801
with ZipWriteStream(filename) as zs:
742-
if os.path.isdir(source_path):
743-
if runtime.startswith('python'):
744-
with install_pip_requirements(
745-
query, zs, os.path.join(source_path, 'requirements.txt')
746-
) as rd:
747-
rd and zs.write_dirs(rd, timestamp=0) # XXX: temp ts=0
748-
zs.write_dirs(source_path)
749-
else:
750-
zs.write_file(source_path)
802+
bpm = BuildPlanManager()
803+
bpm.execute(build_plan, zs, query)
751804

752805
os.utime(filename, ns=(timestamp, timestamp))
753806
logger.info('Created: %s', shlex.quote(filename))

package.tf

+1-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ data "external" "archive_prepare" {
2222

2323
artifacts_dir = var.artifacts_dir
2424
runtime = var.runtime
25-
source_path = var.source_path
25+
source_path = jsonencode(var.source_path)
2626
hash_extra = var.hash_extra
2727
hash_extra_paths = jsonencode(["${path.module}/package.py"])
2828
}

0 commit comments

Comments
 (0)