From 1e09cde261cebb6bd42a3cd4931a2ff7a770036d Mon Sep 17 00:00:00 2001 From: Murali Anagani Date: Fri, 29 Jan 2016 12:52:49 -0800 Subject: [PATCH 001/124] Support to provide a rez release message using an editor. --- src/rez/cli/release.py | 25 ++++++++++++++++++++++++- src/rez/config.py | 1 + src/rez/rezconfig.py | 4 ++++ src/rez/utils/platform_.py | 2 +- 4 files changed, 30 insertions(+), 2 deletions(-) diff --git a/src/rez/cli/release.py b/src/rez/cli/release.py index 2d5d48c74..aa48b54d2 100644 --- a/src/rez/cli/release.py +++ b/src/rez/cli/release.py @@ -2,6 +2,7 @@ Build a package from source and deploy it. ''' import os +from subprocess import call def setup_parser(parser, completions=False): @@ -27,6 +28,9 @@ def setup_parser(parser, completions=False): help="release even if repository-related errors occur. DO NOT use this " "option unless you absolutely must release a package, despite there being " "a problem (such as inability to contact the repository server)") + parser.add_argument( + "--no-message", dest="no_message", action="store_true", + help="do not prompt for release message.") setup_parser_common(parser) @@ -35,6 +39,21 @@ def command(opts, parser, extra_arg_groups=None): from rez.build_system import create_build_system from rez.release_vcs import create_release_vcs from rez.cli.build import get_build_args + from rez.config import config + + release_msg = opts.message + filename = None + + if config.prompt_release_message and not release_msg and not opts.no_message: + filename = os.path.join(config.tmpdir, "rez_release_message.tmp") + + with open(filename, "a+") as f: + ed = config.editor + call([ed, filename]) + + read_data = f.read() + if read_data: + release_msg = read_data working_dir = os.getcwd() @@ -61,5 +80,9 @@ def command(opts, parser, extra_arg_groups=None): ignore_existing_tag=opts.ignore_existing_tag, verbose=True) - builder.release(release_message=opts.message, + builder.release(release_message=release_msg, variants=opts.variants) + 
+ # remove the release message file + if filename: + os.remove(filename) diff --git a/src/rez/config.py b/src/rez/config.py index 144c6b67b..5e0a71b13 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -219,6 +219,7 @@ def _parse_env_var(self, value): "parent_variables": StrList, "resetting_variables": StrList, "release_hooks": StrList, + "prompt_release_message": Bool, "critical_styles": OptionalStrList, "error_styles": OptionalStrList, "warning_styles": OptionalStrList, diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index e71086bb6..57a4d005f 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -351,6 +351,10 @@ # rezplugins/release_hook. release_hooks = [] +# Prompt for release message using an editor. If set to False, there will be +# no editor prompt. +prompt_release_message = True + ############################################################################### # Suites diff --git a/src/rez/utils/platform_.py b/src/rez/utils/platform_.py index ca73cdad6..97a329b7f 100644 --- a/src/rez/utils/platform_.py +++ b/src/rez/utils/platform_.py @@ -271,7 +271,7 @@ def _editor(self): ed = os.getenv("EDITOR") if ed is None: from rez.util import which - ed = which("xdg-open", "vim", "vi") + ed = which("vi", "vim", "xdg-open") return ed def _difftool(self): From ac245f9b71e407a217392ae1ae9086058c0fb077 Mon Sep 17 00:00:00 2001 From: Allan Johns Date: Wed, 20 Jul 2016 12:13:26 -0700 Subject: [PATCH 002/124] -added pagke_repo pre_variant_install() -filesystem pkg repo uses this to create tmp '.buildingXXX' files during install/release -these tagfiles tell the system it needs to check for valid package.py files for these packages -this avoids breaking resolves when partial pkg installs don't yet have package.py written --- src/rez/package_repository.py | 8 ++ src/rezplugins/build_process/local.py | 7 ++ .../package_repository/filesystem.py | 119 ++++++++++++++++-- 3 files changed, 121 insertions(+), 13 deletions(-) diff --git 
a/src/rez/package_repository.py b/src/rez/package_repository.py index 3bd17b153..87569e0a0 100644 --- a/src/rez/package_repository.py +++ b/src/rez/package_repository.py @@ -141,6 +141,14 @@ def iter_variants(self, package_resource): """ raise NotImplementedError + def pre_variant_install(self, variant_resource): + """Called before a variant is installed. + + If any directories are created on disk for the variant to install into, + this is called before that happens. + """ + pass + def install_variant(self, variant_resource, dry_run=False, overrides=None): """Install a variant into this repository. diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index 3e3d20926..c44a317c9 100644 --- a/src/rezplugins/build_process/local.py +++ b/src/rezplugins/build_process/local.py @@ -1,6 +1,7 @@ """ Builds packages on local host """ +from rez.package_repository import package_repository_manager from rez.build_process_ import BuildProcessHelper, BuildType from rez.release_hook import ReleaseHookEvent from rez.exceptions import BuildError, ReleaseError @@ -96,10 +97,16 @@ def _build_variant_base(self, variant, build_type, install_path=None, install_path = install_path or self.package.config.local_packages_path variant_install_path = self.get_package_install_path(install_path) variant_build_path = self.build_path + if variant.subpath: variant_build_path = os.path.join(variant_build_path, variant.subpath) variant_install_path = os.path.join(variant_install_path, variant.subpath) + # inform package repo that a variant is about to be built/installed + pkg_repo = package_repository_manager.get_repository(install_path) + pkg_repo.pre_variant_install(variant.resource) + + # create directories (build, install) if clean and os.path.exists(variant_build_path): shutil.rmtree(variant_build_path) if not os.path.exists(variant_build_path): diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py 
index defa93121..c2f18dcc1 100644 --- a/src/rezplugins/package_repository/filesystem.py +++ b/src/rezplugins/package_repository/filesystem.py @@ -74,12 +74,6 @@ def iter_packages(self): # versioned packages for version_str in self._repository._get_version_dirs(self.path): - - if _settings.check_package_definition_files: - path = os.path.join(self.path, version_str) - if not self._repository._get_file(path, "package")[0]: - continue - package = self._repository.get_resource( FileSystemPackageResource.key, location=self.location, @@ -177,8 +171,6 @@ def _load_old_formats(self): pass return data - # should be static or classmethod, since it's passed as an arg to - # load_from_file, which is memcached @staticmethod def _update_changelog(file_format, data): # this is to deal with older package releases. They can contain long @@ -403,6 +395,8 @@ class FileSystemPackageRepository(PackageRepository): schema_dict = {"file_lock_timeout": int, "file_lock_dir": Or(None, str)} + building_prefix = ".building" + @classmethod def name(cls): return "filesystem" @@ -488,6 +482,24 @@ def file_lock_dir(self): return dirname + def pre_variant_install(self, variant_resource): + if not variant_resource.version: + return + + # create 'building' tagfile, this makes sure that a resolve doesn't + # pick up this package if it doesn't yet have a package.py created. 
+ path = self.location + + family_path = os.path.join(path, variant_resource.name) + if not os.path.isdir(family_path): + os.makedirs(family_path) + + filename = self.building_prefix + str(variant_resource.version) + filepath = os.path.join(family_path, filename) + + with open(filepath, 'w'): # create empty file + pass + def install_variant(self, variant_resource, dry_run=False, overrides=None): if variant_resource._repository is self: return variant_resource @@ -548,6 +560,7 @@ def _get_family_dirs(self): dirs = [] if not os.path.isdir(self.location): return dirs + for name in os.listdir(self.location): path = os.path.join(self.location, name) if os.path.isdir(path): @@ -557,6 +570,7 @@ def _get_family_dirs(self): name_, ext_ = os.path.splitext(name) if ext_ in (".py", ".yaml") and is_valid_package_name(name_): dirs.append((name_, ext_[1:])) + return dirs def _get_version_dirs__key(self, root): @@ -568,14 +582,59 @@ def _get_version_dirs__key(self, root): key=_get_version_dirs__key, debug=config.debug_memcache) def _get_version_dirs(self, root): - dirs = [] + + # simpler case if this test is on + # + if _settings.check_package_definition_files: + dirs = [] + + for name in os.listdir(root): + if name.startswith('.'): + continue + + path = os.path.join(root, name) + if os.path.isdir(path): + if not self._is_valid_package_directory(path): + continue + + dirs.append(name) + return dirs + + # with test off, we have to check for 'building' dirs, these have to be + # tested regardless. 
Failed releases may cause 'building files' to be + # left behind, so we need to clear these out also + # + dirs = set() + building_dirs = set() + + # find dirs and dirs marked as 'building' for name in os.listdir(root): if name.startswith('.'): - continue + if not name.startswith(self.building_prefix): + continue + + ver_str = name[len(self.building_prefix):] + building_dirs.add(ver_str) + path = os.path.join(root, name) if os.path.isdir(path): - dirs.append(name) - return dirs + dirs.add(name) + + # check 'building' dirs for validity + for name in building_dirs: + if name not in dirs: + continue + + path = os.path.join(root, name) + if not self._is_valid_package_directory(path): + # package probably still being built + dirs.remove(name) + + return list(dirs) + + # True if `path` contains package.py or similar + def _is_valid_package_directory(self, path): + return bool(self._get_file(path, "package")[0]) def _get_families(self): families = [] @@ -592,6 +651,7 @@ def _get_families(self): name=name, ext=ext) families.append(family) + return families def _get_family(self, name): @@ -760,7 +820,7 @@ def _create_variant(self, variant, dry_run=False, overrides=None): package_data["config"] = parent_package._data.get("config") package_data.pop("base", None) - # create version dir and write out the new package definition file + # create version dir if it doesn't already exist family_path = os.path.join(self.location, variant.name) if variant.version: path = os.path.join(family_path, str(variant.version)) @@ -778,10 +838,28 @@ def _create_variant(self, variant, dry_run=False, overrides=None): if package_data.get(key) is None: package_data[key] = value + # write out the new package definition file filepath = os.path.join(path, "package.py") with open_file_for_write(filepath) as f: dump_package_data(package_data, buf=f, format_=package_format) + # delete the tmp 'building' file. 
+ if variant.version: + filename = self.building_prefix + str(variant.version) + filepath = os.path.join(family_path, filename) + if os.path.exists(filepath): + try: + os.remove(filepath) + except: + pass + + # delete other stale building files; previous failed releases may have + # left some around + try: + self._delete_stale_build_tagfiles(family_path) + except: + pass + # touch the family dir, this keeps memcached resolves updated properly os.utime(family_path, None) @@ -803,6 +881,21 @@ def _create_variant(self, variant, dry_run=False, overrides=None): raise RezSystemError("Internal failure - expected installed variant") return new_variant + def _delete_stale_build_tagfiles(self, family_path): + now = time.time() + + for name in os.listdir(family_path): + if not name.startswith(self.building_prefix): + continue + + filepath = os.path.join(family_path, name) + st = os.stat(filepath) + + # just delete if > 24hrs old + age = now - st.st_ctime + if age > (3600 * 24): + os.remove(filepath) + def register_plugin(): return FileSystemPackageRepository From f0f6e4b1a55c323b7a250e0f6ab3f58986483c00 Mon Sep 17 00:00:00 2001 From: Allan Johns Date: Thu, 21 Jul 2016 11:08:36 -0700 Subject: [PATCH 003/124] -more selective deletion of stale buildtagfiles (.buildVER) --- src/rez/utils/_version.py | 2 +- .../package_repository/filesystem.py | 25 +++++++++++++------ 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index f9a814b34..dccdae4d3 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.0.rc1.43" +_rez_version = "2.0.rc1.51" # Copyright 2013-2016 Allan Johns. 
diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py index c2f18dcc1..41c849993 100644 --- a/src/rezplugins/package_repository/filesystem.py +++ b/src/rezplugins/package_repository/filesystem.py @@ -888,13 +888,24 @@ def _delete_stale_build_tagfiles(self, family_path): if not name.startswith(self.building_prefix): continue - filepath = os.path.join(family_path, name) - st = os.stat(filepath) - - # just delete if > 24hrs old - age = now - st.st_ctime - if age > (3600 * 24): - os.remove(filepath) + tagfilepath = os.path.join(family_path, name) + ver_str = name[len(self.building_prefix):] + pkg_path = os.path.join(family_path, ver_str) + + if os.path.exists(pkg_path): + # build tagfile not needed if package is valid + if self._is_valid_package_directory(pkg_path): + os.remove(tagfilepath) + continue + else: + # remove tagfile if pkg is gone. Delete only tagfiles over a certain + # age, otherwise might delete a tagfile another process has created + # just before it created the package directory. 
+ st = os.stat(tagfilepath) + age = now - st.st_mtime + + if age > 3600: + os.remove(tagfilepath) def register_plugin(): From b89dedc82046716e129edf9e128b69055947db7c Mon Sep 17 00:00:00 2001 From: Allan Johns Date: Thu, 21 Jul 2016 15:04:16 -0700 Subject: [PATCH 004/124] -make package_filter config setting usable in package.py config overrides --- src/rez/build_process_.py | 10 ++++++++++ src/rez/config.py | 3 +++ src/rez/utils/_version.py | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/src/rez/build_process_.py b/src/rez/build_process_.py index 0ecbb6ce0..678f7ec21 100644 --- a/src/rez/build_process_.py +++ b/src/rez/build_process_.py @@ -189,13 +189,23 @@ def create_build_context(self, variant, build_type, build_path): requests_str = ' '.join(map(str, request)) self._print("Resolving build environment: %s", requests_str) + if build_type == BuildType.local: packages_path = self.package.config.packages_path else: packages_path = self.package.config.nonlocal_packages_path + if self.package.config.is_overridden("package_filter"): + from rez.package_filter import PackageFilterList + + data = self.package.config.package_filter + package_filter = PackageFilterList.from_pod(data) + else: + package_filter = None + context = ResolvedContext(request, package_paths=packages_path, + package_filter=package_filter, building=True) if self.verbose: context.print_info() diff --git a/src/rez/config.py b/src/rez/config.py index 4354591bd..1a62f482a 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -387,6 +387,9 @@ def override(self, key, value): self.overrides[key] = value self._uncache(key) + def is_overridden(self, key): + return (key in self.overrides) + def remove_override(self, key): """Remove a setting override, if one exists.""" keys = key.split('.') diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index dccdae4d3..49c7d83bc 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this 
value to version up Rez. Do not place anything else in this file. -_rez_version = "2.0.rc1.51" +_rez_version = "2.0.rc1.52" # Copyright 2013-2016 Allan Johns. From 21445a625ec7286a81dde78cce43246cc411d7a0 Mon Sep 17 00:00:00 2001 From: Sebastian Kral Date: Mon, 15 Aug 2016 18:44:11 -0400 Subject: [PATCH 005/124] Added Travis CI integration --- .travis.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..9bf1263ad --- /dev/null +++ b/.travis.yml @@ -0,0 +1,14 @@ +language: python + +python: + - "2.7" + +install: + # Adding tcsh + - "sudo apt-get install tcsh" + # Installing rez + - "python ./install.py ../rez_install" + +script: + # Running rez tests + - "../rez_install/bin/rez/rez-selftest" From 9e6088a08dfa855b56fb2413a649cc2148f77348 Mon Sep 17 00:00:00 2001 From: Sebastian Kral Date: Mon, 15 Aug 2016 18:46:08 -0400 Subject: [PATCH 006/124] Added appveyor integration --- appveyor.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 appveyor.yml diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 000000000..0afb8e1c9 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,17 @@ +environment: + matrix: + # For Python versions available on Appveyor, see + # http://www.appveyor.com/docs/installed-software#python + # The list here is complete (excluding Python 2.6, which + # isn't covered by this document) at the time of writing. + - PYTHON: "C:\\Python27-x64" + +install: + # Install rez + - "%PYTHON%\\python.exe install.py temp" + +build: off + +test_script: + # Run rez selftest + - "C:\\projects\\rez\\temp\\Scripts\\rez\\rez.exe selftest" From 192d57daea4cc4f1c0b1a4cf38331fc7eef09ac6 Mon Sep 17 00:00:00 2001 From: Blazej Floch Date: Sun, 4 Sep 2016 12:07:49 -0400 Subject: [PATCH 007/124] Fixes postfix prompt and cleans whitespace to be more controllable. 
--- src/rezplugins/shell/sh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rezplugins/shell/sh.py b/src/rezplugins/shell/sh.py index e6d77bfc0..7fc10ac31 100644 --- a/src/rezplugins/shell/sh.py +++ b/src/rezplugins/shell/sh.py @@ -95,7 +95,7 @@ def _bind_interactive_rez(self): if config.prefix_prompt: cmd = 'export PS1="%s $REZ_STORED_PROMPT"' else: - cmd = 'export PS1="$REZ_STORED_PROMPT" %s' + cmd = 'export PS1="$REZ_STORED_PROMPT%s "' self._addline(cmd % "\[\e[1m\]$REZ_ENV_PROMPT\[\e[0m\]") def setenv(self, key, value): From c61d1a78cc1b6ad717488498e71198a935c31fef Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Tue, 1 Mar 2016 18:22:41 -0800 Subject: [PATCH 008/124] build_process.local: _build_variant_base - if shutil.copy fails, try replacing the file --- src/rezplugins/build_process/local.py | 34 ++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index 3e3d20926..40dc24150 100644 --- a/src/rezplugins/build_process/local.py +++ b/src/rezplugins/build_process/local.py @@ -128,11 +128,43 @@ def _build_variant_base(self, variant, build_type, install_path=None, if not build_result.get("success"): raise BuildError("The %s build system failed" % build_system_name) + def copy_or_replace(src, dst): + '''try to copy with mode, and if it fails, try replacing + ''' + try: + shutil.copy(src, dst) + except (OSError, IOError), e: + # It's possible that the file existed, but was owned by someone + # else - in that situation, shutil.copy might then fail when it + # tries to copy perms. 
+ # However, it's possible that we have write perms to the dir - + # in which case, we can just delete and replace + import errno + if e.errno == errno.EPERM: + import tempfile + # try copying into a temporary location beside the old + # file - if we have perms to do that, we should have perms + # to then delete the old file, and move the new one into + # place + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + + dst_dir, dst_name = os.path.split(dst) + dst_temp = tempfile.mktemp(prefix=dst_name + '.', + dir=dst_dir) + shutil.copy(src, dst_temp) + if not os.path.isfile(dst_temp): + raise RuntimeError( + "shutil.copy completed successfully, but path" + " '%s' still did not exist" % dst_temp) + os.remove(dst) + shutil.move(dst_temp, dst) + if install: # install some files for debugging purposes extra_files = build_result.get("extra_files", []) + [rxt_filepath] for file_ in extra_files: - shutil.copy(file_, variant_install_path) + copy_or_replace(file_, variant_install_path) return build_result From 739ca770bbfc3dc58709bf3d7e6acbd8cc0655dd Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Wed, 19 Oct 2016 14:55:38 -0700 Subject: [PATCH 009/124] hg: print log from most recent to oldest, so don't truncate current commit --- src/rezplugins/release_vcs/hg.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/rezplugins/release_vcs/hg.py b/src/rezplugins/release_vcs/hg.py index 0d959c2e3..7ea498fc5 100644 --- a/src/rezplugins/release_vcs/hg.py +++ b/src/rezplugins/release_vcs/hg.py @@ -212,7 +212,15 @@ def get_changelog(self, previous_revision=None): if prev_commit: # git behavior is to simply print the log from the last common # ancsestor... which is apparently desired. so we'll mimic that - commit_range = "ancestor(%s, .)::." 
% prev_commit + + # however, we want to print in order from most recent to oldest, + # because: + # a) if the log gets truncated, we want to cut off the + # oldest commits, not the current one, and + # b) this mimics the order they're printed in git + # c) this mimics the order they're printed if you have no + # previous_revision, and just do "hg log" + commit_range = "reverse(ancestor(%s, .)::.)" % prev_commit stdout = self.hg("log", "-r", commit_range) else: stdout = self.hg("log") From b3244564f388a0c6a5a495243bc1770c9dac83c2 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Wed, 19 Oct 2016 15:45:54 -0700 Subject: [PATCH 010/124] add max_package_changelog_revisions config option --- src/rez/build_process_.py | 4 +++- src/rez/config.py | 1 + src/rez/release_vcs.py | 2 +- src/rez/rezconfig.py | 2 ++ src/rezplugins/release_vcs/git.py | 10 ++++++---- src/rezplugins/release_vcs/hg.py | 13 +++++++++---- src/rezplugins/release_vcs/stub.py | 2 +- src/rezplugins/release_vcs/svn.py | 2 +- 8 files changed, 24 insertions(+), 12 deletions(-) diff --git a/src/rez/build_process_.py b/src/rez/build_process_.py index 0ecbb6ce0..cc1e6df5e 100644 --- a/src/rez/build_process_.py +++ b/src/rez/build_process_.py @@ -328,7 +328,9 @@ def get_release_data(self): with self.repo_operation(): revision = self.vcs.get_current_revision() with self.repo_operation(): - changelog = self.vcs.get_changelog(previous_revision) + changelog = self.vcs.get_changelog( + previous_revision, + max_revisions=config.max_package_changelog_revisions) # truncate changelog - very large changelogs can cause package load # times to be very high, we don't want that diff --git a/src/rez/config.py b/src/rez/config.py index 633bf6bf0..91c9ee1cf 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -266,6 +266,7 @@ def _parse_env_var(self, value): "build_thread_count": BuildThreadCount_, "resource_caching_maxsize": Int, "max_package_changelog_chars": Int, + "max_package_changelog_revisions": Int, 
"memcached_package_file_min_compress_len": Int, "memcached_context_file_min_compress_len": Int, "memcached_listdir_min_compress_len": Int, diff --git a/src/rez/release_vcs.py b/src/rez/release_vcs.py index 12d4c1391..e4bb85efa 100644 --- a/src/rez/release_vcs.py +++ b/src/rez/release_vcs.py @@ -144,7 +144,7 @@ def get_current_revision(self): """ raise NotImplementedError - def get_changelog(self, previous_revision=None): + def get_changelog(self, previous_revision=None, max_revisions=None): """Get the changelog text since the given revision. If previous_revision is not an ancestor (for example, the last release diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index 2b0b36188..45bf34f9f 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -476,6 +476,8 @@ # this adversely impacts package load times. max_package_changelog_chars = 65536 +# If not zero, truncates all package changelogs to only show the last N commits +max_package_changelog_revisions = 0 ############################################################################### # Rez-1 Compatibility diff --git a/src/rezplugins/release_vcs/git.py b/src/rezplugins/release_vcs/git.py index a3ca23da3..b13360de7 100644 --- a/src/rezplugins/release_vcs/git.py +++ b/src/rezplugins/release_vcs/git.py @@ -134,7 +134,7 @@ def validate_repostate(self): "Could not release: %d commits %s %s." 
% (abs(n), s, remote_uri)) - def get_changelog(self, previous_revision=None): + def get_changelog(self, previous_revision=None, max_revisions=None): prev_commit = None if previous_revision is not None: try: @@ -144,13 +144,15 @@ def get_changelog(self, previous_revision=None): print_debug("couldn't determine previous commit from: %r" % previous_revision) + args = ["log"] + if max_revisions: + args.extend(["-n", str(max_revisions)]) if prev_commit: # git returns logs to last common ancestor, so even if previous # release was from a different branch, this is ok commit_range = "%s..HEAD" % prev_commit - stdout = self.git("log", commit_range) - else: - stdout = self.git("log") + args.append(commit_range) + stdout = self.git(*args) return '\n'.join(stdout) diff --git a/src/rezplugins/release_vcs/hg.py b/src/rezplugins/release_vcs/hg.py index 7ea498fc5..c9e4d1bf9 100644 --- a/src/rezplugins/release_vcs/hg.py +++ b/src/rezplugins/release_vcs/hg.py @@ -199,7 +199,7 @@ def _get(key, fn): return doc - def get_changelog(self, previous_revision=None): + def get_changelog(self, previous_revision=None, max_revisions=None): prev_commit = None if previous_revision is not None: try: @@ -209,6 +209,9 @@ def get_changelog(self, previous_revision=None): print_debug("couldn't determine previous commit from: %r" % previous_revision) + args = ["log"] + if max_revisions: + args.extend(["-l", str(max_revisions)]) if prev_commit: # git behavior is to simply print the log from the last common # ancsestor... which is apparently desired. 
so we'll mimic that @@ -220,10 +223,12 @@ def get_changelog(self, previous_revision=None): # b) this mimics the order they're printed in git # c) this mimics the order they're printed if you have no # previous_revision, and just do "hg log" + # d) if max_revisions is giving, want limiting will only take the + # most recent N entries commit_range = "reverse(ancestor(%s, .)::.)" % prev_commit - stdout = self.hg("log", "-r", commit_range) - else: - stdout = self.hg("log") + args.extend(["-r", commit_range]) + + stdout = self.hg(*args) return '\n'.join(stdout) def create_release_tag(self, tag_name, message=None): diff --git a/src/rezplugins/release_vcs/stub.py b/src/rezplugins/release_vcs/stub.py index 1ba0401d9..0577de470 100644 --- a/src/rezplugins/release_vcs/stub.py +++ b/src/rezplugins/release_vcs/stub.py @@ -37,7 +37,7 @@ def validate_repostate(self): def get_current_revision(self): return self.time - def get_changelog(self, previous_revision=None): + def get_changelog(self, previous_revision=None, max_revisions=None): if previous_revision: if isinstance(previous_revision, int): seconds = self.time - previous_revision diff --git a/src/rezplugins/release_vcs/svn.py b/src/rezplugins/release_vcs/svn.py index a3d65e0ee..84bcdc9e6 100644 --- a/src/rezplugins/release_vcs/svn.py +++ b/src/rezplugins/release_vcs/svn.py @@ -95,7 +95,7 @@ def _create_tag_impl(self, tag_name, message=None): self.svnc.callback_get_log_message = lambda x: (True, x) self.svnc.copy2([(self.this_url,)], tag_url, make_parents=True) - def get_changelog(self, previous_revision=None): + def get_changelog(self, previous_revision=None, max_revisions=None): return "TODO" def get_tag_url(self, tag_name=None): From 7da94e1e0570d9cff3699e0d83e55f79884a93b9 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Thu, 3 Nov 2016 12:34:04 -0700 Subject: [PATCH 011/124] added option to force color output, even if not a tty --- src/rez/config.py | 19 ++++++++++++++++--- src/rez/rezconfig.py | 5 +++++ 
src/rez/utils/colorize.py | 11 ++++++++--- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/rez/config.py b/src/rez/config.py index 633bf6bf0..9ecdf1cdc 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -126,6 +126,7 @@ class Bool(Setting): schema = Schema(bool) true_words = frozenset(["1", "true", "yes", "y", "on"]) false_words = frozenset(["0", "false", "no", "n", "off"]) + all_words = true_words | false_words def _parse_env_var(self, value): value = value.lower() @@ -134,10 +135,22 @@ def _parse_env_var(self, value): elif value in self.false_words: return False else: - words = self.true_words | self.false_words raise ConfigurationError( "expected $%s to be one of: %s" - % (self._env_var_name, ", ".join(words))) + % (self._env_var_name, ", ".join(self.all_words))) + + +class ForceOrBool(Bool): + FORCE_STR = "force" + + # need force first, or Bool.schema will coerce "force" to True + schema = Or(FORCE_STR, Bool.schema) + all_words = Bool.all_words | frozenset([FORCE_STR]) + + def _parse_env_var(self, value): + if value == self.FORCE_STR: + return value + super(ForceOrBool, self)._parse_env_var(value) class Dict(Setting): @@ -272,7 +285,7 @@ def _parse_env_var(self, value): "memcached_resolve_min_compress_len": Int, "allow_unversioned_packages": Bool, "rxt_as_yaml": Bool, - "color_enabled": Bool, + "color_enabled": ForceOrBool, "resolve_caching": Bool, "cache_package_files": Bool, "cache_listdir": Bool, diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index 2b0b36188..dbe856ed5 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -586,6 +586,11 @@ # Enables/disables colorization globally. # Note: Turned off for Windows currently as there seems to be a problem with # the Colorama module. +# May also set to the string "force", which will make rez output color styling +# information, even if the the output streams are not ttys. Useful if you are +# piping the output of rez, but will eventually be printing to a tty later. 
+# When force is used, will generally be set through an environemnt variable, ie, +# echo $(REZ_COLOR_ENABLED=force python -c "from rez.utils.colorize import Printer, local; Printer()('foo', local)") color_enabled = (os.name == "posix") ### Do not move or delete this comment (__DOC_END__) diff --git a/src/rez/utils/colorize.py b/src/rez/utils/colorize.py index 0d81eb5ec..5a1d4b0a1 100644 --- a/src/rez/utils/colorize.py +++ b/src/rez/utils/colorize.py @@ -258,6 +258,10 @@ def is_tty(self): """ return stream_is_tty(self.stream) + @property + def is_colorized(self): + return config.get("color_enabled", False) == "force" or self.is_tty + def _get_style_function_for_level(self, level): return self.STYLES.get(level, notset) @@ -271,7 +275,7 @@ def emit(self, record): try: message = self.format(record) - if not self.is_tty: + if not self.is_colorized: self.stream.write(message) else: style = self._get_style_function_for_level(record.levelno) @@ -289,13 +293,14 @@ def emit(self, record): class Printer(object): def __init__(self, buf=sys.stdout): self.buf = buf - self.tty = stream_is_tty(buf) + self.colorize = (config.get("color_enabled", False) == "force") \ + or stream_is_tty(buf) def __call__(self, msg='', style=None): print >> self.buf, self.get(msg, style) def get(self, msg, style=None): - if style and self.tty: + if style and self.colorize: msg = style(msg) return msg From 3d8364c700c400bfbf6a4eb983349e81cf94822d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fredrik=20Br=C3=A4nnbacka=20FL?= Date: Tue, 8 Nov 2016 13:00:05 +0100 Subject: [PATCH 012/124] Added failgraph flag to rez-env --- src/rez/cli/env.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/rez/cli/env.py b/src/rez/cli/env.py index 37628a175..d8c7581fc 100644 --- a/src/rez/cli/env.py +++ b/src/rez/cli/env.py @@ -91,6 +91,10 @@ def setup_parser(parser, completions=False): parser.add_argument( "-q", "--quiet", action="store_true", help="run in quiet mode (hides welcome message)") + 
parser.add_argument( + "--fail-graph", action="store_true", + help="if the build environment fails to resolve due to a conflict, " + "display the resolve graph as an image.") parser.add_argument( "--new-session", action="store_true", help="start the shell in a new process group") @@ -197,6 +201,14 @@ def command(opts, parser, extra_arg_groups=None): success = (context.status == ResolverStatus.solved) if not success: context.print_info(buf=sys.stderr) + if opts.fail_graph: + if context.graph: + from rez.utils.graph_utils import view_graph + g = context.graph(as_dot=True) + view_graph(g) + else: + print >> sys.stderr, \ + "the failed resolve context did not generate a graph." if opts.output: if opts.output == '-': # print to stdout From 2d656f0a119a575f54d1fe8c7965598a58c9ae2d Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Tue, 8 Nov 2016 15:03:59 -0800 Subject: [PATCH 013/124] rex: don't let packages pollute namespace of future packages --- src/rez/rex.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/src/rez/rex.py b/src/rez/rex.py index ca8138681..cb4ab65ad 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -1169,9 +1169,20 @@ def execute_code(self, code, filename=None): code (str): Rex code to execute. filename (str): Filename to report if there are syntax errors. """ - self.compile_code(code=code, - filename=filename, - exec_namespace=self.globals) + # we want to execute the code using self.globals - if for no other + # reason that self.formatter is pointing at self.globals, so if we + # passed in a copy, we would also need to make self.formatter "look" at + # the same copy - but we don't want to "pollute" our namespace, because + # the same executor may be used to run multiple packages. 
Therefore, + # we save a copy of self.globals before execution, and restore it after + saved_globals = dict(self.globals) + try: + self.compile_code(code=code, + filename=filename, + exec_namespace=self.globals) + finally: + self.globals.clear() + self.globals.update(saved_globals) def execute_function(self, func, *nargs, **kwargs): """ From 007e9663973c38f2964a21ec939b77a9da77b308 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Thu, 10 Nov 2016 10:24:52 -0800 Subject: [PATCH 014/124] allow configuration of truncate cap for shell error output --- src/rez/config.py | 1 + src/rez/rezconfig.py | 4 ++++ src/rez/vendor/sh/__init__.py | 6 ++++++ src/rez/vendor/sh/sh.py | 19 +++++++++++-------- 4 files changed, 22 insertions(+), 8 deletions(-) diff --git a/src/rez/config.py b/src/rez/config.py index 633bf6bf0..f8410f931 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -297,6 +297,7 @@ def _parse_env_var(self, value): "quiet": Bool, "show_progress": Bool, "catch_rex_errors": Bool, + "shell_error_truncate_cap": Int, "set_prompt": Bool, "prefix_prompt": Bool, "warn_old_commands": Bool, diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index 2b0b36188..57b3c1e68 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -379,6 +379,10 @@ # are left uncaught, which can be useful for debugging purposes. catch_rex_errors = True +# Sets the maximum number of characters printed from the stdout / stderr of some +# shell commands when they fail. If 0, then the output is not truncated +shell_error_truncate_cap = 750 + ############################################################################### # Build diff --git a/src/rez/vendor/sh/__init__.py b/src/rez/vendor/sh/__init__.py index e69de29bb..1ec0cb7fb 100644 --- a/src/rez/vendor/sh/__init__.py +++ b/src/rez/vendor/sh/__init__.py @@ -0,0 +1,6 @@ +# putting rez-specific code here, because this file wouldn't exist in a +# "normal" distribution of sh + +from rez.config import config +from . 
import sh +sh.ErrorReturnCode.truncate_cap = config.shell_error_truncate_cap \ No newline at end of file diff --git a/src/rez/vendor/sh/sh.py b/src/rez/vendor/sh/sh.py index 5bf2e68ae..8f5adc0f3 100644 --- a/src/rez/vendor/sh/sh.py +++ b/src/rez/vendor/sh/sh.py @@ -132,20 +132,23 @@ def __init__(self, full_cmd, stdout, stderr): self.stdout = stdout self.stderr = stderr + def truncate(output, name): + if not self.truncate_cap: + return output + truncated_output = output[:self.truncate_cap] + delta = len(output) - len(truncated_output) + if delta: + truncated_output += ( + "... (%d more, please see e.%s)" % (delta, name)).encode() + return truncated_output if self.stdout is None: exc_stdout = "" else: - exc_stdout = self.stdout[:self.truncate_cap] - out_delta = len(self.stdout) - len(exc_stdout) - if out_delta: - exc_stdout += ("... (%d more, please see e.stdout)" % out_delta).encode() + exc_stdout = truncate(self.stdout, 'stdout') if self.stderr is None: exc_stderr = "" else: - exc_stderr = self.stderr[:self.truncate_cap] - err_delta = len(self.stderr) - len(exc_stderr) - if err_delta: - exc_stderr += ("... 
(%d more, please see e.stderr)" % err_delta).encode() + exc_stderr = truncate(self.stderr, 'stderr') msg = "\n\n RAN: %r\n\n STDOUT:\n%s\n\n STDERR:\n%s" % \ (full_cmd, exc_stdout.decode(DEFAULT_ENCODING, "replace"), From 2a565fcaf81bb6b6a486260a4613fcee078c4a40 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Fri, 11 Nov 2016 16:09:36 -0800 Subject: [PATCH 015/124] fix so that, even if there are no memcached servers, DoNotCache results are properly unwrapped --- src/rez/utils/memcached.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/rez/utils/memcached.py b/src/rez/utils/memcached.py index 08ca603db..5581a3792 100644 --- a/src/rez/utils/memcached.py +++ b/src/rez/utils/memcached.py @@ -353,7 +353,10 @@ def wrapper(*nargs, **kwargs): return result else: def wrapper(*nargs, **kwargs): - return func(*nargs, **kwargs) + result = func(*nargs, **kwargs) + if isinstance(result, DoNotCache): + return result.result + return result def forget(): """Forget entries in the cache. 
From 9b6e65f202daaa254e1a420d0313c2f8e56f3da0 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Thu, 8 Dec 2016 11:32:22 -0800 Subject: [PATCH 016/124] fix for TestRelease.test_2_variant_add --- src/rez/tests/test_release.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/rez/tests/test_release.py b/src/rez/tests/test_release.py index 65ed80595..3258b620d 100644 --- a/src/rez/tests/test_release.py +++ b/src/rez/tests/test_release.py @@ -170,8 +170,14 @@ def test_1(self): def test_2_variant_add(self): """Test variant installation on release """ + orig_src_path = self.src_path self.src_path = os.path.join(self.src_path, "variants") - self._setup_release() + try: + self._setup_release() + finally: + # due to shell_dependent, this will run multiple times, don't + # want to add src_path/variants/variants + self.src_path = orig_src_path # copy the spangle package onto the packages path os.mkdir(self.install_root) From 9e8ca7264cfac00a5fcc81e54bec4e7f98f9b3cf Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Thu, 8 Dec 2016 12:04:39 -0800 Subject: [PATCH 017/124] test_solver: corrected setting of config.packages_path --- src/rez/tests/test_solver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/tests/test_solver.py b/src/rez/tests/test_solver.py index 9412508aa..ce2d394c5 100644 --- a/src/rez/tests/test_solver.py +++ b/src/rez/tests/test_solver.py @@ -17,7 +17,7 @@ def setUpClass(cls): packages_path = os.path.join(path, "data", "solver", "packages") cls.packages_path = [packages_path] cls.settings = dict( - packages_path=[cls.packages_path], + packages_path=cls.packages_path, package_filter=None) def _create_solvers(self, reqs): From d915edb54556fde591abfdd05e922a351ce15d42 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Tue, 13 Dec 2016 15:02:47 -0800 Subject: [PATCH 018/124] create a set of standard build env vars --- src/rez/build_system.py | 41 ++++++++++++++++++++++++++++ 
src/rez/resolved_context.py | 6 ++++ src/rezplugins/build_system/bez.py | 37 ++++++++++++++++++++++--- src/rezplugins/build_system/cmake.py | 34 ++++++++--------------- 4 files changed, 92 insertions(+), 26 deletions(-) diff --git a/src/rez/build_system.py b/src/rez/build_system.py index 263ee1de2..2e7ba2cc6 100644 --- a/src/rez/build_system.py +++ b/src/rez/build_system.py @@ -1,3 +1,5 @@ +from multiprocessing import cpu_count + from rez.build_process_ import BuildType from rez.exceptions import BuildSystemError from rez.packages_ import get_developer_package @@ -145,6 +147,45 @@ def build(self, context, variant, build_path, install_path, install=False, """ raise NotImplementedError + @classmethod + def get_standard_vars(cls, context, variant, build_type, install): + """Returns a standard set of environment variables that can be set + for the build system to use + """ + from rez.config import config + + package = variant.parent + vars = { + 'REZ_BUILD_ENV': 1, + 'REZ_BUILD_THREAD_COUNT': package.config.build_thread_count, + 'REZ_BUILD_VARIANT_INDEX': variant.index or 0, + 'REZ_BUILD_PROJECT_VERSION': str(package.version), + 'REZ_BUILD_PROJECT_NAME': package.name, + 'REZ_BUILD_PROJECT_DESCRIPTION': \ + (package.description or '').strip(), + 'REZ_BUILD_PROJECT_FILE': getattr(variant, 'filepath', ''), + 'REZ_BUILD_REQUIRES_UNVERSIONED': \ + ' '.join(x.name for x in context.requested_packages(True)), + 'REZ_BUILD_TYPE': build_type.name, + 'REZ_BUILD_INSTALL': 1 if install else 0, + } + + if config.rez_1_environment_variables and \ + not config.disable_rez_1_compatibility and \ + build_type == BuildType.central: + vars['REZ_IN_REZ_RELEASE'] = 1 + return vars + + @classmethod + def set_standard_vars(cls, executor, context, variant, build_type, + install): + """Sets a standard set of environment variables for the build system to + use + """ + vars = cls.get_standard_vars(context, variant, build_type, install) + for var, value in vars.iteritems(): + executor.env[var] = 
value + # Copyright 2013-2016 Allan Johns. # diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 41300fdde..8213040ef 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1468,6 +1468,12 @@ def _minor_heading(txt): _minor_heading("variables for package %s" % pkg.qualified_name) prefix = "REZ_" + pkg.name.upper().replace('.', '_') executor.setenv(prefix + "_VERSION", str(pkg.version)) + major_version = pkg.version[0] if len(pkg.version) >= 1 else '' + minor_version = pkg.version[1] if len(pkg.version) >= 2 else '' + patch_version = pkg.version[2] if len(pkg.version) >= 3 else '' + executor.setenv(prefix + "_MAJOR_VERSION", str(major_version)) + executor.setenv(prefix + "_MINOR_VERSION", str(minor_version)) + executor.setenv(prefix + "_PATCH_VERSION", str(patch_version)) executor.setenv(prefix + "_BASE", pkg.base) executor.setenv(prefix + "_ROOT", pkg.root) bindings[pkg.name] = dict(version=VersionBinding(pkg.version), diff --git a/src/rezplugins/build_system/bez.py b/src/rezplugins/build_system/bez.py index db9d2d464..595c1411e 100644 --- a/src/rezplugins/build_system/bez.py +++ b/src/rezplugins/build_system/bez.py @@ -4,9 +4,11 @@ from rez.build_system import BuildSystem from rez.build_process_ import BuildType from rez.util import create_forwarding_script +from rez.packages_ import get_developer_package from rez.resolved_context import ResolvedContext from rez.config import config from rez.utils.yaml import dump_yaml +import functools import os.path import sys @@ -64,7 +66,11 @@ def build(self, context, variant, build_path, install_path, install=False, create_forwarding_script(build_env_script, module=("build_system", "bez"), func_name="_FWD__spawn_build_shell", - build_dir=build_path) + working_dir=self.working_dir, + build_dir=build_path, + variant_index=variant.index, + install=install) + ret["success"] = True ret["build_env_script"] = build_env_script return ret @@ -74,19 +80,42 @@ def build(self, context, 
variant, build_path, install_path, install=False, if install and "install" not in cmd: cmd.append("install") + callback = functools.partial(self._add_build_actions, + context=context, + package=self.package, + variant=variant, + build_type=build_type, + install=install) + retcode, _, _ = context.execute_shell(command=cmd, block=True, - cwd=build_path) + cwd=build_path, + actions_callback=callback) ret["success"] = (not retcode) return ret + @classmethod + def _add_build_actions(cls, executor, context, package, variant, + build_type, install): + cls.set_standard_vars(executor, context, variant, build_type, install) -def _FWD__spawn_build_shell(build_dir): + +def _FWD__spawn_build_shell(working_dir, build_dir, variant_index, install): # This spawns a shell that the user can run 'bez' in directly context = ResolvedContext.load(os.path.join(build_dir, "build.rxt")) + package = get_developer_package(working_dir) + variant = package.get_variant(variant_index) config.override("prompt", "BUILD>") - retcode, _, _ = context.execute_shell(block=True, cwd=build_dir) + callback = functools.partial(BezBuildSystem._add_build_actions, + context=context, + package=package, + variant=variant, + build_type=BuildType.local, + install=install) + + retcode, _, _ = context.execute_shell(block=True, cwd=build_dir, + actions_callback=callback) sys.exit(retcode) diff --git a/src/rezplugins/build_system/cmake.py b/src/rezplugins/build_system/cmake.py index 007881193..1beadb746 100644 --- a/src/rezplugins/build_system/cmake.py +++ b/src/rezplugins/build_system/cmake.py @@ -142,7 +142,8 @@ def _pr(s): context=context, package=self.package, variant=variant, - build_type=build_type) + build_type=build_type, + install=install) # run the build command and capture/print stderr at the same time retcode, _, _ = context.execute_shell(command=cmd, @@ -163,7 +164,8 @@ def _pr(s): func_name="_FWD__spawn_build_shell", working_dir=self.working_dir, build_dir=build_path, - variant_index=variant.index) + 
variant_index=variant.index, + install=install) ret["success"] = True ret["build_env_script"] = build_env_script return ret @@ -201,34 +203,21 @@ def _pr(s): ret["success"] = (not retcode) return ret - @staticmethod - def _add_build_actions(executor, context, package, variant, build_type): + @classmethod + def _add_build_actions(cls, executor, context, package, variant, + build_type, install): settings = package.config.plugins.build_system.cmake cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files") template_path = os.path.join(os.path.dirname(__file__), "template_files") + cls.set_standard_vars(executor, context, variant, build_type, install) + executor.env.CMAKE_MODULE_PATH.append(cmake_path.replace('\\', '/')) executor.env.REZ_BUILD_DOXYFILE = os.path.join(template_path, 'Doxyfile') - executor.env.REZ_BUILD_VARIANT_INDEX = variant.index or 0 - executor.env.REZ_BUILD_THREAD_COUNT = package.config.build_thread_count - # build always occurs on a filesystem package, thus 'filepath' attribute - # exists. This is not the case for packages in general. 
- executor.env.REZ_BUILD_PROJECT_FILE = package.filepath - executor.env.REZ_BUILD_PROJECT_VERSION = str(package.version) - executor.env.REZ_BUILD_PROJECT_NAME = package.name - executor.env.REZ_BUILD_PROJECT_DESCRIPTION = \ - (package.description or '').strip() - executor.env.REZ_BUILD_REQUIRES_UNVERSIONED = \ - ' '.join(x.name for x in context.requested_packages(True)) executor.env.REZ_BUILD_INSTALL_PYC = '1' if settings.install_pyc else '0' - if config.rez_1_environment_variables and \ - not config.disable_rez_1_compatibility and \ - build_type == BuildType.central: - executor.env.REZ_IN_REZ_RELEASE = 1 - -def _FWD__spawn_build_shell(working_dir, build_dir, variant_index): +def _FWD__spawn_build_shell(working_dir, build_dir, variant_index, install): # This spawns a shell that the user can run 'make' in directly context = ResolvedContext.load(os.path.join(build_dir, "build.rxt")) package = get_developer_package(working_dir) @@ -239,7 +228,8 @@ def _FWD__spawn_build_shell(working_dir, build_dir, variant_index): context=context, package=package, variant=variant, - build_type=BuildType.local) + build_type=BuildType.local, + install=install) retcode, _, _ = context.execute_shell(block=True, cwd=build_dir, From a2eadce0911ac1a6f25758671cdfd6a7cb4bd683 Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Thu, 12 Jan 2017 16:18:07 -0800 Subject: [PATCH 019/124] Added pbs library for windows support (instead of sh). Updated several plugins to use the alternate library on windows to enable git and command hook support on windows. 
--- src/rez/vendor/pbs.py | 602 +++++++++++++++++++++++++ src/rezplugins/release_hook/command.py | 9 +- src/rezplugins/release_vcs/git.py | 6 +- 3 files changed, 614 insertions(+), 3 deletions(-) create mode 100644 src/rez/vendor/pbs.py diff --git a/src/rez/vendor/pbs.py b/src/rez/vendor/pbs.py new file mode 100644 index 000000000..be1dc274d --- /dev/null +++ b/src/rez/vendor/pbs.py @@ -0,0 +1,602 @@ +#=============================================================================== +# Copyright (C) 2011-2012 by Andrew Moffat +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+#=============================================================================== + + + +import subprocess as subp +import sys +import traceback +import os +import re +from glob import glob as original_glob +from types import ModuleType +from functools import partial +import warnings +import platform + + +__version__ = "0.110" +__project_url__ = "https://github.com/amoffat/pbs" + +IS_PY3 = sys.version_info[0] == 3 +if IS_PY3: + raw_input = input + unicode = str +else: + pass + + +if "windows" not in platform.system().lower(): + warnings.simplefilter("always") + warnings.warn(""" + +Sh.py is the new pbs. Please download and install sh.py with the following +command: + + $ pip install sh + +or + + $ easy_install sh + +Sh.py includes many enhancements and will be the supported subprocess launcher +for the future. See its documentation here http://amoffat.github.com/sh/. + +To migrate existing code, try this: + + import sh as pbs + +""", DeprecationWarning) + + + +class ErrorReturnCode(Exception): + truncate_cap = 200 + + def __init__(self, full_cmd, stdout, stderr): + self.full_cmd = full_cmd + self.stdout = stdout + self.stderr = stderr + + if self.stdout is None: tstdout = "" + else: + tstdout = self.stdout[:self.truncate_cap] + out_delta = len(self.stdout) - len(tstdout) + if out_delta: + tstdout += ("... (%d more, please see e.stdout)" % out_delta).encode() + + if self.stderr is None: tstderr = "" + else: + tstderr = self.stderr[:self.truncate_cap] + err_delta = len(self.stderr) - len(tstderr) + if err_delta: + tstderr += ("... 
(%d more, please see e.stderr)" % err_delta).encode() + + msg = "\n\nRan: %r\n\nSTDOUT:\n\n %s\n\nSTDERR:\n\n %s" %\ + (full_cmd, tstdout.decode(), tstderr.decode()) + super(ErrorReturnCode, self).__init__(msg) + +class CommandNotFound(Exception): pass + +rc_exc_regex = re.compile("ErrorReturnCode_(\d+)") +rc_exc_cache = {} + +def get_rc_exc(rc): + rc = int(rc) + try: return rc_exc_cache[rc] + except KeyError: pass + + name = "ErrorReturnCode_%d" % rc + exc = type(name, (ErrorReturnCode,), {}) + rc_exc_cache[rc] = exc + return exc + + + + +def which(program): + is_windows = "windows" in platform.system().lower() + endswith_exe = program.lower().endswith('.exe') + def is_exe(fpath): + return os.path.exists(fpath) and os.access(fpath, os.X_OK) + + fpath, fname = os.path.split(program) + if fpath: + if is_exe(program): + return program + if is_windows and not endswith_exe: + if is_exe(exe_file + '.exe'): + return exe_file + '.exe' + else: + for path in os.environ["PATH"].split(os.pathsep): + exe_file = os.path.join(path, program) + if is_exe(exe_file): + return exe_file + if is_windows and not endswith_exe: + if is_exe(exe_file + '.exe'): + return exe_file + '.exe' + + return None + +def resolve_program(program): + path = which(program) + if not path: + # our actual command might have a dash in it, but we can't call + # that from python (we have to use underscores), so we'll check + # if a dash version of our underscore command exists and use that + # if it does + if "_" in program: path = which(program.replace("_", "-")) + if not path: return None + return path + + +def glob(arg): + return original_glob(arg) or arg + + + + +class RunningCommand(object): + def __init__(self, command_ran, process, call_args, stdin=None): + self.command_ran = command_ran + self.process = process + self._stdout = None + self._stderr = None + self.call_args = call_args + + # we're running in the background, return self and let us lazily + # evaluate + if self.call_args["bg"]: return + + # 
we're running this command as a with context, don't do anything + # because nothing was started to run from Command.__call__ + if self.call_args["with"]: return + + # run and block + if stdin: stdin = stdin.encode("utf8") + self._stdout, self._stderr = self.process.communicate(stdin) + self._handle_exit_code(self.process.wait()) + + def __enter__(self): + # we don't actually do anything here because anything that should + # have been done would have been done in the Command.__call__ call. + # essentially all that has to happen is the comand be pushed on + # the prepend stack. + pass + + def __exit__(self, typ, value, traceback): + if self.call_args["with"] and Command._prepend_stack: + Command._prepend_stack.pop() + + def __str__(self): + if IS_PY3: return self.__unicode__() + else: return unicode(self).encode("utf8") + + def __unicode__(self): + if self.process: + if self.call_args["bg"]: self.wait() + if self._stdout: return self.stdout + else: return "" + + def __eq__(self, other): + return unicode(self) == unicode(other) + + def __contains__(self, item): + return item in str(self) + + def __getattr__(self, p): + # let these three attributes pass through to the Popen object + if p in ("send_signal", "terminate", "kill"): + if self.process: return getattr(self.process, p) + else: raise AttributeError + return getattr(unicode(self), p) + + def __repr__(self): + return " ") + except (ValueError, EOFError): break + + try: exec(compile(line, "", "single"), env, env) + except SystemExit: break + except: print(traceback.format_exc()) + + # cleans up our last line + print("") + + + + +# this is a thin wrapper around THIS module (we patch sys.modules[__name__]). +# this is in the case that the user does a "from pbs import whatever" +# in other words, they only want to import certain programs, not the whole +# system PATH worth of commands. 
in this case, we just proxy the +# import lookup to our Environment class +class SelfWrapper(ModuleType): + def __init__(self, self_module): + # this is super ugly to have to copy attributes like this, + # but it seems to be the only way to make reload() behave + # nicely. if i make these attributes dynamic lookups in + # __getattr__, reload sometimes chokes in weird ways... + for attr in ["__builtins__", "__doc__", "__name__", "__package__"]: + setattr(self, attr, getattr(self_module, attr)) + + self.self_module = self_module + self.env = Environment(globals()) + + def __getattr__(self, name): + return self.env[name] + + + + + +# we're being run as a stand-alone script, fire up a REPL +if __name__ == "__main__": + globs = globals() + f_globals = {} + for k in ["__builtins__", "__doc__", "__name__", "__package__"]: + f_globals[k] = globs[k] + env = Environment(f_globals) + run_repl(env) + +# we're being imported from somewhere +else: + self = sys.modules[__name__] + sys.modules[__name__] = SelfWrapper(self) diff --git a/src/rezplugins/release_hook/command.py b/src/rezplugins/release_hook/command.py index f056ace6f..c1f9dd2b2 100644 --- a/src/rezplugins/release_hook/command.py +++ b/src/rezplugins/release_hook/command.py @@ -9,7 +9,12 @@ from rez.utils.scope import scoped_formatter from rez.utils.formatting import expandvars from rez.vendor.schema.schema import Schema, Or, Optional, Use, And -from rez.vendor.sh.sh import Command, ErrorReturnCode, sudo, which +import platform +if "windows" in platform.system().lower(): + from rez.vendor.pbs import Command, ErrorReturnCode, which + sudo = None +else: + from rez.vendor.sh.sh import Command, ErrorReturnCode, sudo, which import getpass import sys import os @@ -75,7 +80,7 @@ def _execute(cmd, arguments): return False run_cmd = Command(cmd_full_path) - if user == 'root': + if user == 'root' and sudo is not None: with sudo: return _execute(run_cmd, cmd_arguments) elif user and user != getpass.getuser(): diff --git 
a/src/rezplugins/release_vcs/git.py b/src/rezplugins/release_vcs/git.py index a3ca23da3..adbe05640 100644 --- a/src/rezplugins/release_vcs/git.py +++ b/src/rezplugins/release_vcs/git.py @@ -4,7 +4,11 @@ from rez.release_vcs import ReleaseVCS from rez.utils.logging_ import print_error, print_warning, print_debug from rez.exceptions import ReleaseVCSError -from rez.vendor.sh.sh import git +import platform +if "windows" in platform.system().lower(): + from rez.vendor.pbs import git +else: + from rez.vendor.sh.sh import git from shutil import rmtree import functools import os.path From 425d6327087d024c068d9044cce0baca33e8c884 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Thu, 12 Jan 2017 16:35:18 -0800 Subject: [PATCH 020/124] invalidate memcache if a package file has been deleted / moved / etc --- src/rez/resolver.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/rez/resolver.py b/src/rez/resolver.py index 6f363c02d..600c5ec16 100644 --- a/src/rez/resolver.py +++ b/src/rez/resolver.py @@ -223,8 +223,17 @@ def _packages_changed(key, data): new_state = variant_states.get(variant) if new_state is None: - repo = variant.resource._repository - new_state = repo.get_variant_state_handle(variant.resource) + try: + repo = variant.resource._repository + new_state = repo.get_variant_state_handle(variant.resource) + except (IOError, OSError) as e: + # if, ie a package file was deleted on disk, then + # an IOError or OSError will be raised when we try to + # read from it - assume that the packages have changed! 
+ self._print("Error loading %r (assuming cached state " + "changed): %s", variant.qualified_name, + e) + return True variant_states[variant] = new_state if old_state != new_state: From 560e835b4ba8346e11964febb00b66fbb852d46a Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Fri, 13 Jan 2017 14:21:57 -0800 Subject: [PATCH 021/124] Updated windows cmd shell plugin and platform definition to correctly support interactive environment creation in subshells. --- src/rez/utils/platform_.py | 2 +- src/rezplugins/shell/cmd.py | 26 ++++++++++++++++++-------- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/src/rez/utils/platform_.py b/src/rez/utils/platform_.py index 5cc737977..eb404bfcf 100644 --- a/src/rez/utils/platform_.py +++ b/src/rez/utils/platform_.py @@ -482,7 +482,7 @@ def symlink(self, source, link_name): raise ctypes.WinError() def _terminal_emulator_command(self): - return "CMD.exe /Q /K" + return "START" def _physical_cores_from_wmic(self): # windows diff --git a/src/rezplugins/shell/cmd.py b/src/rezplugins/shell/cmd.py index 7228f581b..0b28ce55b 100644 --- a/src/rezplugins/shell/cmd.py +++ b/src/rezplugins/shell/cmd.py @@ -2,7 +2,7 @@ Windows Command Prompt (DOS) shell. 
""" from rez.config import config -from rez.rex import RexExecutor, literal +from rez.rex import RexExecutor, literal, OutputStyle from rez.shells import Shell from rez.system import system from rez.utils.platform_ import platform_ @@ -117,9 +117,6 @@ def _record_shell(ex, files, bind_rez=True, print_msg=False): if bind_rez: ex.interpreter._bind_interactive_rez() if print_msg and not quiet: -# ex.info('') -# ex.info('You are now in a rez-configured environment.') -# ex.info('') if system.is_production_rez_install: # previously this was called with the /K flag, however # that would leave spawn_shell hung on a blocked call @@ -147,7 +144,7 @@ def _create_ex(): if shell_command: executor.command(shell_command) - executor.command('exit %errorlevel%') + executor.command('exit %errorlevel%') code = executor.get_output() target_file = os.path.join(tmpdir, "rez-shell.%s" @@ -165,10 +162,23 @@ def _create_ex(): cmd = pre_command.strip().split() else: cmd = pre_command - cmd = cmd + [self.executable, "/Q", "/K", target_file] - p = subprocess.Popen(cmd, env=env, **Popen_args) + cmd = cmd + [self.executable, "/Q", "/K", 'call {}'.format(target_file)] + is_detached = cmd[0] == 'START' + p = subprocess.Popen(cmd, env=env, shell=is_detached, **Popen_args) return p + def get_output(self, style=OutputStyle.file): + if style == OutputStyle.file: + script = '\n'.join(self._lines) + '\n' + else: # eval style + lines = [] + for line in self._lines: + if not line.startswith('REM'): # strip comments + line = line.rstrip() + lines.append(line) + script = '&& '.join(lines) + return script + def escape_string(self, value): return value @@ -193,7 +203,7 @@ def alias(self, key, value): def comment(self, value): for line in value.split('\n'): - self._addline(': %s' % line) + self._addline('REM %s' % line) def info(self, value): for line in value.split('\n'): From d381c023622a5edab792618412e2b8c43d6e0650 Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Thu, 19 Jan 2017 16:33:13 -0800 Subject: 
[PATCH 022/124] Updated windows cmd shell plugin to correctly exit environment shell when running in non-interactive command mode. --- .gitignore | 1 + src/rezplugins/shell/cmd.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 5f9ad53aa..3a068683b 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ dist/ *~ docs/_build .DS_Store +.idea/ \ No newline at end of file diff --git a/src/rezplugins/shell/cmd.py b/src/rezplugins/shell/cmd.py index 0b28ce55b..db1662f24 100644 --- a/src/rezplugins/shell/cmd.py +++ b/src/rezplugins/shell/cmd.py @@ -107,9 +107,14 @@ def spawn_shell(self, context_file, tmpdir, rcfile=None, norc=False, stdin=False, command=None, env=None, quiet=False, pre_command=None, **Popen_args): + print 'COMMAND', command + print 'pre_command', pre_command + startup_sequence = self.get_startup_sequence(rcfile, norc, bool(stdin), command) shell_command = None + print 'SSEQ', startup_sequence + def _record_shell(ex, files, bind_rez=True, print_msg=False): ex.source(context_file) if startup_sequence["envvar"]: @@ -162,7 +167,13 @@ def _create_ex(): cmd = pre_command.strip().split() else: cmd = pre_command - cmd = cmd + [self.executable, "/Q", "/K", 'call {}'.format(target_file)] + + if shell_command: + cmd_flags = ['/Q', '/C'] + else: + cmd_flags = ['/Q', '/K'] + + cmd = cmd + [self.executable] + cmd_flags + ['call {}'.format(target_file)] is_detached = cmd[0] == 'START' p = subprocess.Popen(cmd, env=env, shell=is_detached, **Popen_args) return p From 084d112ca290e60bfd8ab7b51bc448b26899ed83 Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Thu, 19 Jan 2017 16:37:28 -0800 Subject: [PATCH 023/124] removed some print debugs --- src/rezplugins/shell/cmd.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/rezplugins/shell/cmd.py b/src/rezplugins/shell/cmd.py index db1662f24..2d9d406a6 100644 --- a/src/rezplugins/shell/cmd.py +++ b/src/rezplugins/shell/cmd.py @@ -107,14 +107,9 @@ def 
spawn_shell(self, context_file, tmpdir, rcfile=None, norc=False, stdin=False, command=None, env=None, quiet=False, pre_command=None, **Popen_args): - print 'COMMAND', command - print 'pre_command', pre_command - startup_sequence = self.get_startup_sequence(rcfile, norc, bool(stdin), command) shell_command = None - print 'SSEQ', startup_sequence - def _record_shell(ex, files, bind_rez=True, print_msg=False): ex.source(context_file) if startup_sequence["envvar"]: From 6451eaaae545a5415826d85e35d43ab1af81b37e Mon Sep 17 00:00:00 2001 From: Federico Naum Date: Fri, 27 Jan 2017 15:46:20 +1100 Subject: [PATCH 024/124] Avoid trying to create the .building file while running rez build. Only do it if install was provided --- src/rezplugins/build_process/local.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index c44a317c9..ca109476d 100644 --- a/src/rezplugins/build_process/local.py +++ b/src/rezplugins/build_process/local.py @@ -102,18 +102,19 @@ def _build_variant_base(self, variant, build_type, install_path=None, variant_build_path = os.path.join(variant_build_path, variant.subpath) variant_install_path = os.path.join(variant_install_path, variant.subpath) - # inform package repo that a variant is about to be built/installed - pkg_repo = package_repository_manager.get_repository(install_path) - pkg_repo.pre_variant_install(variant.resource) - # create directories (build, install) if clean and os.path.exists(variant_build_path): shutil.rmtree(variant_build_path) if not os.path.exists(variant_build_path): os.makedirs(variant_build_path) - if install and not os.path.exists(variant_install_path): - os.makedirs(variant_install_path) + if install: + # inform package repo that a variant is about to be built/installed + pkg_repo = package_repository_manager.get_repository(install_path) + pkg_repo.pre_variant_install(variant.resource) + + if 
os.path.exists(variant_install_path): + os.makedirs(variant_install_path) # create build environment context, rxt_filepath = self.create_build_context( From 7fc9d43afd4ef04d4e941a864c26e1c3c8a04b2e Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 1 Feb 2017 15:52:28 +1100 Subject: [PATCH 025/124] -added debugging help via USR1 signal --- src/rez/__init__.py | 17 +++++++++++++++++ src/rez/utils/_version.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/rez/__init__.py b/src/rez/__init__.py index 86dd66279..d7d12c138 100644 --- a/src/rez/__init__.py +++ b/src/rez/__init__.py @@ -18,6 +18,23 @@ logging.config.fileConfig(logging_conf_file, disable_existing_loggers=False) +# actions registered on SIGUSR1 +action = os.getenv("REZ_SIGUSR1_ACTION") +if action: + import signal, traceback + + if action == "print_stack": + def callback(sig, frame): + txt = ''.join(traceback.format_stack(frame)) + print + print txt + else: + callback = None + + if callback: + signal.signal(signal.SIGUSR1, callback) # Register handler + + # Copyright 2013-2016 Allan Johns. # # This library is free software: you can redistribute it and/or diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 7e6898043..a220a90bb 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.2.0" +_rez_version = "2.2.1" # Copyright 2013-2016 Allan Johns. 
From a3c726059d41766497ce2934cf23e89efe40d8f1 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Feb 2017 08:43:44 +1100 Subject: [PATCH 026/124] -added more configurable email recipient addresses on post release --- .../emailer-recipients-example.yaml | 43 ++++++++++ src/rezplugins/release_hook/emailer.py | 79 +++++++++++++++++-- src/rezplugins/release_hook/rezconfig | 5 +- 3 files changed, 120 insertions(+), 7 deletions(-) create mode 100644 src/rezplugins/release_hook/emailer-recipients-example.yaml diff --git a/src/rezplugins/release_hook/emailer-recipients-example.yaml b/src/rezplugins/release_hook/emailer-recipients-example.yaml new file mode 100644 index 000000000..4fa8d1a15 --- /dev/null +++ b/src/rezplugins/release_hook/emailer-recipients-example.yaml @@ -0,0 +1,43 @@ +# +# This is an example of a 'recipients' config file. To use one, set the +# 'recipients' config setting to this filepath, rather than a list of email +# addresses. +# +# This config gives more fine-grained control of who is emailed when a package +# is released, and all in one place (after all, you can override the setting +# 'config.plugins.release_hook.emailer.recipients' in any package.py as well). +# +# In this example: +# * An explicit list of packages email one address; +# * All packages with the attribute package_type='external', email another address; +# * All packages (regardless of whether they meet the above criteria) are sent +# to another address. +# +# A mail will be sent to every address that matches the package being released. +# +# The 'filters' section performs simple value matching on the package's +# attributes. If the value given is a list, then the filter will match on any of +# the given values. If more than one attribute is listed under 'filters', then +# they are ANDed - ie all attributes must match. Only simple types (string, int) +# can be tested - testing any other attribute type will fail. 
If no attribute +# value is given (ie, its value is None), it will match if the attribute exists +# on the package, regardless of value. +# +# Note the lack of 'filters' in the last rule - this means the rule will match +# all packages. +# + +rules: +- filters: + name: + - maya_utils + - maya_anim_tools + - maya_scn_mgr + recipients: + - maya_devs@myvfxstudio.com +- filters: + package_type: external + recipients: + - external_pkg_release@myvfxstudio.com +- recipients: + - rez-release@myvfxstudio.com diff --git a/src/rezplugins/release_hook/emailer.py b/src/rezplugins/release_hook/emailer.py index 6d8759bbb..c939c0826 100644 --- a/src/rezplugins/release_hook/emailer.py +++ b/src/rezplugins/release_hook/emailer.py @@ -4,8 +4,11 @@ from rez.release_hook import ReleaseHook from rez.system import system from email.mime.text import MIMEText -from rez.utils.logging_ import print_warning +from rez.utils.logging_ import print_warning, print_error +from rez.utils.yaml import load_yaml from rez.utils.scope import scoped_formatter +from rez.vendor.schema.schema import Or +import os.path import smtplib import sys @@ -18,7 +21,7 @@ class EmailReleaseHook(ReleaseHook): "smtp_host": basestring, "smtp_port": int, "sender": basestring, - "recipients": [basestring]} + "recipients": Or(basestring, [basestring])} @classmethod def name(cls): @@ -58,27 +61,91 @@ def post_release(self, user, install_path, variants, release_message=None, def send_email(self, subject, body): if not self.settings.recipients: return # nothing to do, sending email to nobody + if not self.settings.smtp_host: print_warning("did not send release email: " "SMTP host is not specified") return + recipients = self.get_recipients() + if not recipients: + return + print "Sending release email to:" - print '\n'.join("- %s" % x for x in self.settings.recipients) + print '\n'.join("- %s" % x for x in recipients) msg = MIMEText(body) msg["Subject"] = subject msg["From"] = self.settings.sender - msg["To"] = 
str(',').join(self.settings.recipients) + msg["To"] = str(',').join(recipients) try: s = smtplib.SMTP(self.settings.smtp_host, self.settings.smtp_port) s.sendmail(from_addr=self.settings.sender, - to_addrs=self.settings.recipients, + to_addrs=recipients, msg=msg.as_string()) print 'Email(s) sent.' except Exception, e: - print >> sys.stderr, "release email delivery failed: %s" % str(e) + print_error("release email delivery failed: %s" % str(e)) + + def get_recipients(self): + value = self.settings.recipients + + if isinstance(value, list): + return value + + if os.path.exists(value): + filepath = value + + try: + return self.load_recipients(filepath) + except Exception as e: + print_error("failed to load recipients config: %s. Emails " + "not sent" % str(e)) + elif '@' in value: + return [value] # assume it's an email address + else: + print_error("email recipient file does not exist: %s. Emails not " + "sent" % value) + + return [] + + def load_recipients(self, filepath): + def test(value, type_): + if not isinstance(value, type_): + raise TypeError("Expected %s, not %s" % type_, value) + return value + + conf = load_yaml(filepath) + recipients = set() + + for rule in test(conf.get("rules", []), list): + filters = rule.get("filters") + match = True + + if filters: + for attr, test_value in test(filters, dict).iteritems(): + + missing = object() + value = getattr(self.package, attr, missing) + + if value is missing: + match = False + elif test_value is None: + match = True + elif isinstance(test_value, list): + match = (value in test_value) + else: + match = (value == test_value) + + if not match: + break + + if match: + rule_recipients = rule.get("recipients") + recipients.update(test(rule_recipients, list)) + + return sorted(recipients) def register_plugin(): diff --git a/src/rezplugins/release_hook/rezconfig b/src/rezplugins/release_hook/rezconfig index 0355b0612..6c7d40927 100644 --- a/src/rezplugins/release_hook/rezconfig +++ 
b/src/rezplugins/release_hook/rezconfig @@ -8,7 +8,10 @@ emailer: # The address that post-release emails appear to come from. sender: '{system.user}@rez-release.com' - # List of recipients of post-release emails. + # List of recipients of post-release emails; OR, path to recipients config + # file (see emailer-recipients-example.yaml). If this is a string that + # contains '@' and doesn't refer to a filepath, then it's treated as an + # email address. recipients: [] # Message format. Available objects for formatting are: From 8696fed2396a45352a8f94778450cd762591fae0 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Feb 2017 08:50:01 +1100 Subject: [PATCH 027/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 9ea5cf1d0..1e2503763 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.3.0" +_rez_version = "2.4.0" # Copyright 2013-2016 Allan Johns. From 43a8ae92d73851a6ecd6a7bec77ad569b7c1ef19 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Feb 2017 11:26:10 +1100 Subject: [PATCH 028/124] -added changelog to end of release notes file used if prompt_release_notes is True. --- src/rez/build_process_.py | 27 +++++++++++++++++++++++---- src/rez/cli/release.py | 34 ++++++++++++++++++++++++++++++++-- src/rez/utils/_version.py | 2 +- 3 files changed, 56 insertions(+), 7 deletions(-) diff --git a/src/rez/build_process_.py b/src/rez/build_process_.py index 678f7ec21..2ba8a5136 100644 --- a/src/rez/build_process_.py +++ b/src/rez/build_process_.py @@ -137,6 +137,14 @@ def release(self, release_message=None, variants=None): """ raise NotImplementedError + def get_changelog(self): + """Get the changelog since last package release. + + Returns: + str: Changelog. 
+ """ + raise NotImplementedError + class BuildProcessHelper(BuildProcess): """A BuildProcess base class with some useful functionality. @@ -314,6 +322,19 @@ def get_previous_release(self): return package return None + def get_changelog(self): + previous_package = self.get_previous_release() + if previous_package: + previous_revision = previous_package.revision + else: + previous_revision = None + + changelog = None + with self.repo_operation(): + changelog = self.vcs.get_changelog(previous_revision) + + return changelog + def get_release_data(self): """Get release data for this release. @@ -333,12 +354,10 @@ def get_release_data(self): previous_version=previous_version) revision = None - changelog = None - with self.repo_operation(): revision = self.vcs.get_current_revision() - with self.repo_operation(): - changelog = self.vcs.get_changelog(previous_revision) + + changelog=self.get_changelog() # truncate changelog - very large changelogs can cause package load # times to be very high, we don't want that diff --git a/src/rez/cli/release.py b/src/rez/cli/release.py index a3e0b7966..dc616b997 100644 --- a/src/rez/cli/release.py +++ b/src/rez/cli/release.py @@ -79,14 +79,44 @@ def command(opts, parser, extra_arg_groups=None): filename = "rez-release-message-%s.txt" % h filepath = os.path.join(config.tmpdir, filename) + header = "" + changelog_token = "###" + if not os.path.exists(filepath): + txt = header + + # get changelog and add to release notes file, for reference. They + # get stripped out again before being added as package release notes. + try: + changelog = builder.get_changelog() + except: + pass + + if changelog: + txt += ("\n\n%s This is for reference only - this line and all " + "following lines will be stripped from the release " + "notes.\n\n" % changelog_token) + txt += changelog + with open(filepath, 'w') as f: - print >> f, "Enter your release notes here." 
+ print >> f, txt call([config.editor, filepath]) with open(filepath) as f: - release_msg = f.read().strip() + release_msg = f.read() + + # strip changelog out + try: + i = release_msg.index(changelog_token) + release_msg = release_msg[:i] + except ValueError: + pass + + # strip header out + release_msg = release_msg.replace(header, "") + + release_msg = release_msg.strip() if not release_msg: ch = None diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 1e2503763..bd33cf680 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.4.0" +_rez_version = "2.4.1" # Copyright 2013-2016 Allan Johns. From 1924205a7df4139d2424f7d2ea39cd57d4c678a9 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 7 Feb 2017 18:55:43 +1100 Subject: [PATCH 029/124] -added 'postprocess' attribute to package.py -updated build process so 'developer' package only created once. 
--- src/rez/build_process_.py | 14 +++-- src/rez/build_system.py | 11 ++-- src/rez/cli/build.py | 6 +++ src/rez/cli/release.py | 5 ++ src/rez/package_maker__.py | 4 +- src/rez/package_resources_.py | 11 +++- src/rez/packages_.py | 47 ++++++++++++++++ src/rez/serialise.py | 20 +++---- src/rez/utils/_version.py | 2 +- src/rez/utils/data_utils.py | 53 ++++++++++++++++--- src/rezplugins/build_system/bez.py | 3 +- src/rezplugins/build_system/cmake.py | 5 +- src/rezplugins/build_system/make.py | 2 +- .../package_repository/filesystem.py | 6 ++- 14 files changed, 155 insertions(+), 34 deletions(-) diff --git a/src/rez/build_process_.py b/src/rez/build_process_.py index 2ba8a5136..53bb5b57a 100644 --- a/src/rez/build_process_.py +++ b/src/rez/build_process_.py @@ -19,8 +19,8 @@ def get_build_process_types(): return plugin_manager.get_plugins('build_process') -def create_build_process(process_type, working_dir, build_system, vcs=None, - ensure_latest=True, skip_repo_errors=False, +def create_build_process(process_type, working_dir, build_system, package=None, + vcs=None, ensure_latest=True, skip_repo_errors=False, ignore_existing_tag=False, verbose=False): """Create a `BuildProcess` instance.""" from rez.plugin_managers import plugin_manager @@ -30,6 +30,7 @@ def create_build_process(process_type, working_dir, build_system, vcs=None, cls = plugin_manager.get_plugin_class('build_process', process_type) return cls(working_dir, + package=package, build_system=build_system, vcs=vcs, ensure_latest=ensure_latest, @@ -58,8 +59,9 @@ class BuildProcess(object): def name(cls): raise NotImplementedError - def __init__(self, working_dir, build_system, vcs=None, ensure_latest=True, - skip_repo_errors=False, ignore_existing_tag=False, verbose=False): + def __init__(self, working_dir, build_system, package=None, vcs=None, + ensure_latest=True, skip_repo_errors=False, + ignore_existing_tag=False, verbose=False): """Create a BuildProcess. 
Args: @@ -90,7 +92,9 @@ def __init__(self, working_dir, build_system, vcs=None, ensure_latest=True, "Build process was instantiated with a mismatched VCS instance") self.debug_print = config.debug_printer("package_release") - self.package = get_developer_package(working_dir) + + self.package = package or get_developer_package(working_dir) + hook_names = self.package.config.release_hooks or [] self.hooks = create_release_hooks(hook_names, working_dir) self.build_path = os.path.join(self.working_dir, diff --git a/src/rez/build_system.py b/src/rez/build_system.py index 263ee1de2..85aaf3307 100644 --- a/src/rez/build_system.py +++ b/src/rez/build_system.py @@ -21,7 +21,7 @@ def get_valid_build_systems(working_dir): return clss -def create_build_system(working_dir, buildsys_type=None, opts=None, +def create_build_system(working_dir, buildsys_type=None, package=None, opts=None, write_build_scripts=False, verbose=False, build_args=[], child_build_args=[]): """Return a new build system that can build the source in working_dir.""" @@ -46,6 +46,7 @@ def create_build_system(working_dir, buildsys_type=None, opts=None, cls = iter(clss).next() return cls(working_dir, opts=opts, + package=package, write_build_scripts=write_build_scripts, verbose=verbose, build_args=build_args, @@ -63,8 +64,9 @@ def name(cls): """Return the name of the build system, eg 'make'.""" raise NotImplementedError - def __init__(self, working_dir, opts=None, write_build_scripts=False, - verbose=False, build_args=[], child_build_args=[]): + def __init__(self, working_dir, opts=None, package=None, + write_build_scripts=False, verbose=False, build_args=[], + child_build_args=[]): """Create a build system instance. 
Args: @@ -84,7 +86,8 @@ def __init__(self, working_dir, opts=None, write_build_scripts=False, raise BuildSystemError("Not a valid %s working directory: %s" % (self.name(), working_dir)) - self.package = get_developer_package(working_dir) + self.package = package or get_developer_package(working_dir) + self.write_build_scripts = write_build_scripts self.build_args = build_args self.child_build_args = child_build_args diff --git a/src/rez/cli/build.py b/src/rez/cli/build.py index 61071f17b..7960c6bf7 100644 --- a/src/rez/cli/build.py +++ b/src/rez/cli/build.py @@ -82,16 +82,21 @@ def get_build_args(opts, parser, extra_arg_groups): def command(opts, parser, extra_arg_groups=None): from rez.exceptions import BuildContextResolveError + from rez.packages_ import get_developer_package from rez.build_process_ import create_build_process from rez.build_system import create_build_system import sys + # load package working_dir = os.getcwd() + package = get_developer_package(working_dir) # create build system build_args, child_build_args = get_build_args(opts, parser, extra_arg_groups) buildsys_type = opts.buildsys if ("buildsys" in opts) else None + buildsys = create_build_system(working_dir, + package=package, buildsys_type=buildsys_type, opts=opts, write_build_scripts=opts.scripts, @@ -102,6 +107,7 @@ def command(opts, parser, extra_arg_groups=None): # create and execute build process builder = create_build_process(opts.process, working_dir, + package=package, build_system=buildsys, verbose=True) diff --git a/src/rez/cli/release.py b/src/rez/cli/release.py index dc616b997..f11fa64fe 100644 --- a/src/rez/cli/release.py +++ b/src/rez/cli/release.py @@ -36,13 +36,16 @@ def setup_parser(parser, completions=False): def command(opts, parser, extra_arg_groups=None): + from rez.packages_ import get_developer_package from rez.build_process_ import create_build_process from rez.build_system import create_build_system from rez.release_vcs import create_release_vcs from rez.cli.build 
import get_build_args from rez.config import config + # load package working_dir = os.getcwd() + package = get_developer_package(working_dir) # create vcs vcs = create_release_vcs(working_dir, opts.vcs) @@ -52,6 +55,7 @@ def command(opts, parser, extra_arg_groups=None): buildsys_type = opts.buildsys if ("buildsys" in opts) else None buildsys = create_build_system(working_dir, + package=package, buildsys_type=buildsys_type, opts=opts, verbose=True, @@ -61,6 +65,7 @@ def command(opts, parser, extra_arg_groups=None): # create and execute release process builder = create_build_process(opts.process, working_dir, + package=package, build_system=buildsys, vcs=vcs, ensure_latest=(not opts.no_latest), diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index d12db1fb9..7b9a0e7b2 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -3,7 +3,7 @@ from rez.utils.formatting import PackageRequest from rez.utils.data_utils import AttrDictWrapper from rez.utils.logging_ import print_warning -from rez.package_resources_ import help_schema, _commands_schema +from rez.package_resources_ import help_schema, _commands_schema, _function_schema from rez.package_repository import create_memory_package_repository from rez.packages_ import Package from rez.vendor.schema.schema import Schema, Optional, Or, Use, And @@ -38,6 +38,8 @@ Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, + Optional("postprocess"): _function_schema, + # arbitrary fields Optional(basestring): object }) diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 67dc3d53e..7960125fb 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -23,6 +23,11 @@ 'previous_revision', 'vcs') +# package attributes that we don't install +package_build_only_keys = ( + "postprocess", +) + #------------------------------------------------------------------------------ # utility schemas @@ -92,7 +97,8 @@ # package 
package_schema_dict = package_base_schema_dict.copy() package_schema_dict.update({ - Optional("variants"): [[PackageRequest]] + Optional("variants"): [[PackageRequest]], + Optional("postprocess"): SourceCode }) @@ -122,6 +128,7 @@ basestring, # commands in text block [basestring]) # old-style (rez-1) commands +_function_schema = Or(SourceCode, callable) _package_request_schema = And(basestring, Use(PackageRequest)) @@ -156,6 +163,8 @@ Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, + Optional("postprocess"): _function_schema, + Optional("timestamp"): int, Optional('revision'): object, Optional('changelog'): large_string_dict, diff --git a/src/rez/packages_.py b/src/rez/packages_.py index 90edfb7a8..d90e50191 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -3,6 +3,7 @@ VariantResource, package_family_schema, package_schema, variant_schema, \ package_release_keys from rez.package_serialise import dump_package_data +from rez.utils.logging_ import print_info from rez.utils.data_utils import cached_property from rez.utils.formatting import StringFormatMixin, StringFormatType from rez.utils.filesystem import is_subdirectory @@ -495,7 +496,11 @@ def get_developer_package(path): "Error in %r - missing or non-string field 'name'" % filepath) package = create_package(name, data) + package = _postprocess_package(package, data) + + # graft on developer-package-specific attributes setattr(package, "filepath", filepath) + return package @@ -631,6 +636,48 @@ def get_latest_package(name, range_=None, paths=None, error=False): return None +def _postprocess_package(package, data): + postprocess = getattr(package, "postprocess", None) + if not postprocess: + return package + + from rez.serialise import process_python_objects + from rez.utils.data_utils import get_dict_diff + from copy import deepcopy + + postprocessed_data = deepcopy(data) + + # apply postprocessing + postprocess.func(this=package, data=postprocessed_data) + + # if 
postprocess added functions, these need to be converted to + # SourceCode instances + postprocessed_data = process_python_objects(postprocessed_data) + + if postprocessed_data != data: + # recreate package from modified package data + package = create_package(package.name, postprocessed_data) + + # print summary of changed package attributes + added, removed, changed = get_dict_diff(data, postprocessed_data) + lines = ["Package attributes were changed in post processing:"] + + if added: + lines.append("Added attributes: %s" + % ['.'.join(x) for x in added]) + if removed: + lines.append("Removed attributes: %s" + % ['.'.join(x) for x in removed]) + if changed: + lines.append("Changed attributes: %s" + % ['.'.join(x) for x in changed]) + + txt = '\n'.join(lines) + print_info(txt) + + return package + + def _get_families(name, paths=None): entries = [] for path in (paths or config.packages_path): diff --git a/src/rez/serialise.py b/src/rez/serialise.py index 451286ef5..b70210c06 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -156,19 +156,21 @@ def load_py(stream, filepath=None): (k not in __builtins__ or __builtins__[k] != v): result[k] = v - def _process_objects(data): - for k, v in data.iteritems(): - if isfunction(v): - data[k] = SourceCode.from_function(v) - elif isinstance(v, dict): - _process_objects(v) - return data - result.update(scopes.to_dict()) - result = _process_objects(result) + result = process_python_objects(result) return result +def process_python_objects(data): + for k, v in data.iteritems(): + if isfunction(v): + data[k] = SourceCode.from_function(v) + elif isinstance(v, dict): + process_python_objects(v) + + return data + + def load_yaml(stream, **kwargs): """Load yaml-formatted data from a stream. diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index bd33cf680..4a67f9917 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. 
Do not place anything else in this file. -_rez_version = "2.4.1" +_rez_version = "2.5.0" # Copyright 2013-2016 Allan Johns. diff --git a/src/rez/utils/data_utils.py b/src/rez/utils/data_utils.py index c0c27b4f5..9ef0257c6 100644 --- a/src/rez/utils/data_utils.py +++ b/src/rez/utils/data_utils.py @@ -13,20 +13,56 @@ class _Missing: pass _missing = _Missing() +def get_dict_diff(d1, d2): + """Get added/removed/changed keys between two dicts. + + Each key in the return value is a list, which is the namespaced key that + was affected. + + Returns: + 3-tuple: + - list of added keys; + - list of removed keys; + - list of changed keys. + """ + def _diff(d1_, d2_, namespace): + added = [] + removed = [] + changed = [] + + for k1, v1 in d1_.iteritems(): + if k1 not in d2_: + removed.append(namespace + [k1]) + else: + v2 = d2_[k1] + if v2 != v1: + if isinstance(v1, dict) and isinstance(v2, dict): + namespace_ = namespace + [k1] + added_, removed_, changed_ = _diff(v1, v2, namespace_) + added.extend(added_) + removed.extend(removed_) + changed.extend(changed_) + else: + changed.append(namespace + [k1]) + + for k2 in d2_.iterkeys(): + if k2 not in d1_: + added.append(namespace + [k2]) + + return added, removed, changed + + return _diff(d1, d2, []) + + class SourceCode(object): """Very simple wrapper for python source code.""" - def __init__(self, source): + def __init__(self, source, func=None): self.source = source.rstrip() + self.func = func @classmethod def from_function(cls, func): - argspec = getargspec(func) - if argspec.args or argspec.varargs or argspec.keywords: - raise RexError('top level functions in python rez package files ' - 'cannot take any arguments: %s' % func.__name__) - - # now that we've verified that the func takes no args, can strip out - # the first line of the sourcecode, with the argspec of the func...
+ # get txt of function body loc = getsourcelines(func)[0][1:] code = dedent(''.join(loc)) @@ -48,6 +84,7 @@ def from_function(cls, func): value = SourceCode.__new__(SourceCode) value.source = code + value.func = func return value def corrected_for_indent(self): diff --git a/src/rezplugins/build_system/bez.py b/src/rezplugins/build_system/bez.py index db9d2d464..e244d19c5 100644 --- a/src/rezplugins/build_system/bez.py +++ b/src/rezplugins/build_system/bez.py @@ -32,10 +32,11 @@ def name(cls): def is_valid_root(cls, path): return os.path.isfile(os.path.join(path, "rezbuild.py")) - def __init__(self, working_dir, opts=None, write_build_scripts=False, + def __init__(self, working_dir, opts=None, package=None, write_build_scripts=False, verbose=False, build_args=[], child_build_args=[]): super(BezBuildSystem, self).__init__(working_dir, opts=opts, + package=package, write_build_scripts=write_build_scripts, verbose=verbose, build_args=build_args, diff --git a/src/rezplugins/build_system/cmake.py b/src/rezplugins/build_system/cmake.py index 007881193..6eca14f25 100644 --- a/src/rezplugins/build_system/cmake.py +++ b/src/rezplugins/build_system/cmake.py @@ -76,11 +76,12 @@ def bind_cli(cls, parser): default=settings.build_system, help="set the cmake build system (default: %(default)s).") - def __init__(self, working_dir, opts=None, write_build_scripts=False, + def __init__(self, working_dir, opts=None, package=None, write_build_scripts=False, verbose=False, build_args=[], child_build_args=[]): super(CMakeBuildSystem, self).__init__( working_dir, opts=opts, + package=package, write_build_scripts=write_build_scripts, verbose=verbose, build_args=build_args, @@ -120,7 +121,7 @@ def _pr(s): cmd += (self.build_args or []) cmd.append("-DCMAKE_INSTALL_PREFIX=%s" % install_path) - cmd.append("-DCMAKE_MODULE_PATH=%s" % + cmd.append("-DCMAKE_MODULE_PATH=%s" % sh.get_key_token("CMAKE_MODULE_PATH").replace('\\', '/')) cmd.append("-DCMAKE_BUILD_TYPE=%s" % self.build_target) 
cmd.append("-DREZ_BUILD_TYPE=%s" % build_type.name) diff --git a/src/rezplugins/build_system/make.py b/src/rezplugins/build_system/make.py index b9da55d31..2c65e3053 100644 --- a/src/rezplugins/build_system/make.py +++ b/src/rezplugins/build_system/make.py @@ -14,7 +14,7 @@ def name(cls): def is_valid_root(cls, path): return os.path.isfile(os.path.join(path, "Makefile")) - def __init__(self, working_dir, opts=None, write_build_scripts=False, + def __init__(self, working_dir, opts=None, package=None, write_build_scripts=False, verbose=False, build_args=[], child_build_args=[]): super(MakeBuildSystem, self).__init__(working_dir) raise NotImplementedError diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py index edc256b2f..58ecb040c 100644 --- a/src/rezplugins/package_repository/filesystem.py +++ b/src/rezplugins/package_repository/filesystem.py @@ -4,7 +4,7 @@ from rez.package_repository import PackageRepository from rez.package_resources_ import PackageFamilyResource, PackageResource, \ VariantResourceHelper, PackageResourceHelper, package_pod_schema, \ - package_release_keys + package_release_keys, package_build_only_keys from rez.serialise import clear_file_caches, open_file_for_write from rez.package_serialise import dump_package_data from rez.exceptions import PackageMetadataError, ResourceError, RezSystemError, \ @@ -755,10 +755,14 @@ def _create_variant(self, variant, dry_run=False, overrides=None): existing_package_data = None existing_variants_data = None release_data = {} + new_package_data = variant.parent.validated_data() new_package_data.pop("variants", None) package_changed = False + for key in package_build_only_keys: + new_package_data.pop(key, None) + if existing_package: existing_package_data = existing_package.validated_data() From f072313f633806264d2232a9d7e8169243954493 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 14 Feb 2017 15:34:29 +1100 Subject: [PATCH 030/124] -added package 
postprocessing; -added shared code both at build/runtime; -added @early decorator for package attribs-as-functions --- src/rez/cli/_main.py | 7 +- src/rez/config.py | 3 + src/rez/developer_package.py | 198 ++++++++++++++++ src/rez/exceptions.py | 5 + src/rez/package_maker__.py | 9 +- src/rez/package_resources_.py | 5 +- src/rez/package_serialise.py | 17 +- src/rez/packages_.py | 98 +------- src/rez/resolved_context.py | 9 + src/rez/rex.py | 16 +- src/rez/rezconfig.py | 73 +++++- src/rez/serialise.py | 32 ++- src/rez/tests/test_packages.py | 2 +- src/rez/util.py | 2 +- src/rez/utils/data_utils.py | 56 ----- src/rez/utils/filesystem.py | 13 +- src/rez/utils/sourcecode.py | 238 ++++++++++++++++++++ src/rez/utils/syspath.py | 15 ++ src/rez/utils/yaml.py | 2 +- src/rezplugins/build_process/local.py | 42 +++- src/support/package_utils/README | 4 + src/support/package_utils/get_committers.sh | 16 ++ src/support/package_utils/utils.py | 21 ++ 23 files changed, 699 insertions(+), 184 deletions(-) create mode 100644 src/rez/developer_package.py create mode 100644 src/rez/utils/sourcecode.py create mode 100644 src/rez/utils/syspath.py create mode 100644 src/support/package_utils/README create mode 100644 src/support/package_utils/get_committers.sh create mode 100644 src/support/package_utils/utils.py diff --git a/src/rez/cli/_main.py b/src/rez/cli/_main.py index a5ee23eeb..377e7a163 100644 --- a/src/rez/cli/_main.py +++ b/src/rez/cli/_main.py @@ -4,6 +4,7 @@ import sys from rez.vendor.argparse import _StoreTrueAction, SUPPRESS from rez.cli._util import subcommands, LazyArgumentParser, _env_var_true +from rez.utils.logging_ import print_error from rez.exceptions import RezError, RezSystemError from rez import __version__ @@ -116,10 +117,10 @@ def run_cmd(): try: returncode = run_cmd() except (NotImplementedError, RezSystemError) as e: - import traceback - raise Exception(traceback.format_exc()) + raise except exc_type as e: - print >> sys.stderr, "rez: %s: %s" % 
(e.__class__.__name__, str(e)) + print_error("%s: %s" % (e.__class__.__name__, str(e))) + #print >> sys.stderr, "rez: %s: %s" % (e.__class__.__name__, str(e)) sys.exit(1) sys.exit(returncode or 0) diff --git a/src/rez/config.py b/src/rez/config.py index bbb140855..761ba12d6 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -214,6 +214,7 @@ def _parse_env_var(self, value): "packages_path": PathList, "plugin_path": PathList, "bind_module_path": PathList, + "package_definition_build_python_paths": PathList, "implicit_packages": StrList, "platform_map": OptionalDict, "parent_variables": StrList, @@ -238,6 +239,7 @@ def _parse_env_var(self, value): "suite_visibility": SuiteVisibility_, "rez_tools_visibility": RezToolsVisibility_, "suite_alias_prefix_char": Char, + "package_definition_python_path": OptionalStr, "tmpdir": OptionalStr, "context_tmpdir": OptionalStr, "default_shell": OptionalStr, @@ -264,6 +266,7 @@ def _parse_env_var(self, value): "implicit_back": OptionalStr, "alias_fore": OptionalStr, "alias_back": OptionalStr, + "package_postprocess_function": OptionalStr, "build_thread_count": BuildThreadCount_, "resource_caching_maxsize": Int, "max_package_changelog_chars": Int, diff --git a/src/rez/developer_package.py b/src/rez/developer_package.py new file mode 100644 index 000000000..db50ce776 --- /dev/null +++ b/src/rez/developer_package.py @@ -0,0 +1,198 @@ +from rez.config import config +from rez.packages_ import Package +from rez.serialise import load_from_file, FileFormat +from rez.packages_ import create_package +from rez.exceptions import PackageMetadataError, InvalidPackageError +from rez.utils.syspath import add_sys_paths +from rez.utils.sourcecode import SourceCode +from rez.utils.logging_ import print_info, print_error +from inspect import isfunction +import os.path + + +class DeveloperPackage(Package): + """A developer package. + + This is a package in a source directory that is subsequently built or + released. 
+ """ + def __init__(self, resource): + super(DeveloperPackage, self).__init__(resource) + self.filepath = None + + # include modules, derived from any present @include decorators + self.includes = None + + @classmethod + def from_path(cls, path): + """Load a developer package. + + A developer package may for example be a package.yaml or package.py in a + user's source directory. + + Args: + path: Directory containing the package definition file. + + Returns: + `Package` object. + """ + name = None + data = None + + for name_ in config.plugins.package_repository.filesystem.package_filenames: + for format_ in (FileFormat.py, FileFormat.yaml): + filepath = os.path.join(path, "%s.%s" % (name_, format_.extension)) + + if os.path.isfile(filepath): + with add_sys_paths(config.package_definition_build_python_paths): + data = load_from_file(filepath, format_) + break + if data: + name = data.get("name") + if name is not None or isinstance(name, basestring): + break + + if data is None: + raise PackageMetadataError("No package definition file found at %s" % path) + + if name is None or not isinstance(name, basestring): + raise PackageMetadataError( + "Error in %r - missing or non-string field 'name'" % filepath) + + package = create_package(name, data, package_cls=cls) + + # postprocessing + result = package._get_postprocessed(data) + + if result: + package, data = result + + package.filepath = filepath + + # find all includes, this is needed at install time to copy the right + # py sourcefiles into the package installation + package.includes = set() + + def visit(d): + for k, v in d.iteritems(): + if isinstance(v, SourceCode): + package.includes |= (v.get_includes() or set()) + elif isinstance(v, dict): + visit(v) + + visit(data) + + package._validate_includes() + + return package + + def _validate_includes(self): + if not self.includes: + return + + definition_python_path = self.config.package_definition_python_path + + if not definition_python_path: + raise 
InvalidPackageError( + "Package %s uses @include decorator, but no include path " + "has been configured with the 'package_definition_python_path' " + "setting." % self.filepath) + + for name in self.includes: + filepath = os.path.join(definition_python_path, name) + filepath += ".py" + + if not os.path.exists(filepath): + raise InvalidPackageError( + "@include decorator requests module '%s', but the file " + "%s does not exist." % (name, filepath)) + + def _get_postprocessed(self, data): + """ + Returns: + (DeveloperPackage, new_data) 2-tuple IFF the postprocess function + changed the package; otherwise None. + """ + from rez.serialise import process_python_objects + from rez.utils.data_utils import get_dict_diff + from copy import deepcopy + + with add_sys_paths(config.package_definition_build_python_paths): + postprocess = getattr(self, "postprocess", None) + + if postprocess: + postprocess_func = postprocess.func + print_info("Applying postprocess from package.py") + else: + # load globally configured postprocess function + dotted = self.config.package_postprocess_function + + if not dotted: + return None + + if '.' not in dotted: + print_error( + "Setting 'package_postprocess_function' must be of " + "form 'module[.module.module...].funcname'. 
Package "
+                        "postprocessing has not been applied.")
+                    return None
+
+                name, funcname = dotted.rsplit('.', 1)
+
+                try:
+                    module = __import__(name=name, fromlist=[funcname])
+                except Exception as e:
+                    print_error("Failed to load postprocessing function '%s': %s"
+                                % (dotted, str(e)))
+                    return None
+
+                setattr(module, "InvalidPackageError", InvalidPackageError)
+                postprocess_func = getattr(module, funcname)
+
+                if not postprocess_func or not isfunction(postprocess_func):
+                    print_error("Function '%s' not found" % dotted)
+                    return None
+
+                print_info("Applying postprocess function %s" % dotted)
+
+            postprocessed_data = deepcopy(data)
+
+            # apply postprocessing
+            try:
+                postprocess_func(this=self, data=postprocessed_data)
+            except InvalidPackageError:
+                raise
+            except Exception as e:
+                print_error("Failed to apply postprocess: %s: %s"
+                            % (e.__class__.__name__, str(e)))
+                return None
+
+        # if postprocess added functions, these need to be converted to
+        # SourceCode instances
+        postprocessed_data = process_python_objects(postprocessed_data)
+
+        if postprocessed_data == data:
+            return None
+
+        # recreate package from modified package data
+        package = create_package(self.name, postprocessed_data,
+                                 package_cls=self.__class__)
+
+        # print summary of changed package attributes
+        added, removed, changed = get_dict_diff(data, postprocessed_data)
+        lines = ["Package attributes were changed in post processing:"]
+
+        if added:
+            lines.append("Added attributes: %s"
+                         % ['.'.join(x) for x in added])
+        if removed:
+            lines.append("Removed attributes: %s"
+                         % ['.'.join(x) for x in removed])
+        if changed:
+            lines.append("Changed attributes: %s"
+                         % ['.'.join(x) for x in changed])
+
+        txt = '\n'.join(lines)
+        print_info(txt)
+
+        return package, postprocessed_data
diff --git a/src/rez/exceptions.py b/src/rez/exceptions.py
index 9e5adefaf..3dee4e987 100644
--- a/src/rez/exceptions.py
+++ b/src/rez/exceptions.py
@@ -159,6 +159,11 @@ class PackageRepositoryError(RezError):
     pass
 
 
+class
InvalidPackageError(RezError): + """A special case exception used in package 'postprocess function'.""" + pass + + class RezGuiQTImportError(ImportError): """A special case - see cli/gui.py """ diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 7b9a0e7b2..daac7da69 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -47,7 +47,7 @@ class PackageMaker(AttrDictWrapper): """Utility class for creating packages.""" - def __init__(self, name, data=None): + def __init__(self, name, data=None, package_cls=None): """Create a package maker. Args: @@ -55,6 +55,7 @@ def __init__(self, name, data=None): """ super(PackageMaker, self).__init__(data) self.name = name + self.package_cls = package_cls or Package # set by `make_package` self.installed_variants = [] @@ -79,7 +80,8 @@ def get_package(self): family_resource = repo.get_package_family(self.name) it = repo.iter_packages(family_resource) package_resource = it.next() - package = Package(package_resource) + + package = self.package_cls(package_resource) # revalidate the package for extra measure package.validate_data() @@ -87,8 +89,11 @@ def get_package(self): def _get_data(self): data = self._data.copy() + data.pop("installed_variants", None) data.pop("skipped_variants", None) + data.pop("package_cls", None) + data = dict((k, v) for k, v in data.iteritems() if v is not None) return data diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 7960125fb..b7e54ece3 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -1,8 +1,9 @@ from rez.utils.resources import Resource from rez.utils.schema import Required, schema_keys from rez.utils.logging_ import print_warning -from rez.utils.data_utils import cached_property, SourceCode, \ - AttributeForwardMeta, LazyAttributeMeta +from rez.utils.sourcecode import SourceCode +from rez.utils.data_utils import cached_property, AttributeForwardMeta, \ + LazyAttributeMeta from 
rez.utils.formatting import PackageRequest from rez.exceptions import PackageMetadataError, ResourceError from rez.config import config, Config, create_config diff --git a/src/rez/package_serialise.py b/src/rez/package_serialise.py index e692b54bf..316105294 100644 --- a/src/rez/package_serialise.py +++ b/src/rez/package_serialise.py @@ -3,7 +3,7 @@ from rez.package_resources_ import help_schema from rez.vendor.schema.schema import Schema, Optional, And, Or, Use from rez.vendor.version.version import Version -from rez.utils.data_utils import SourceCode +from rez.utils.sourcecode import SourceCode from rez.utils.formatting import PackageRequest, indent, \ dict_to_attributes_code, as_block_string from rez.utils.schema import Required @@ -119,13 +119,17 @@ def dump_package_data(data, buf, format_=FileFormat.py, skip_attributes=None): # instead we just comment out these comment actions - that way we can refer to # the package file to see what the original commands were, but they don't get # processed by rex. 
+# def _commented_old_command_annotations(sourcecode): lines = sourcecode.source.split('\n') for i, line in enumerate(lines): if line.startswith("comment('OLD COMMAND:"): lines[i] = "# " + line source = '\n'.join(lines) - return SourceCode(source) + + other = sourcecode.copy() + other.source = source + return other def _dump_package_data_yaml(items, buf): @@ -157,9 +161,8 @@ def _dump_package_data_py(items, buf): # source code becomes a python function if key in ("commands", "pre_commands", "post_commands"): value = _commented_old_command_annotations(value) - # don't indent code if already indented - source = value.source if value.source[0] in (' ', '\t') else indent(value.source) - txt = "def %s():\n%s" % (key, source) + + txt = value.to_text(funcname=key) elif isinstance(value, list) and len(value) > 1: # nice formatting for lists lines = ["%s = [" % key] @@ -185,8 +188,8 @@ def _dump_package_data_py(items, buf): print >> buf, '' -dump_functions = {FileFormat.py: _dump_package_data_py, - FileFormat.yaml: _dump_package_data_yaml} +dump_functions = {FileFormat.py: _dump_package_data_py, + FileFormat.yaml: _dump_package_data_yaml} # Copyright 2013-2016 Allan Johns. 
diff --git a/src/rez/packages_.py b/src/rez/packages_.py index d90e50191..d9a4a58d3 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -3,14 +3,13 @@ VariantResource, package_family_schema, package_schema, variant_schema, \ package_release_keys from rez.package_serialise import dump_package_data -from rez.utils.logging_ import print_info +from rez.utils.logging_ import print_info, print_error from rez.utils.data_utils import cached_property from rez.utils.formatting import StringFormatMixin, StringFormatType from rez.utils.filesystem import is_subdirectory from rez.utils.schema import schema_keys from rez.utils.resources import ResourceHandle, ResourceWrapper -from rez.exceptions import PackageMetadataError, PackageFamilyNotFoundError, \ - ResourceError +from rez.exceptions import PackageFamilyNotFoundError, ResourceError from rez.vendor.version.version import VersionRange from rez.vendor.version.requirement import VersionedObject from rez.serialise import load_from_file, FileFormat @@ -457,54 +456,11 @@ def get_package_from_string(txt, paths=None): def get_developer_package(path): - """Load a developer package. + from rez.developer_package import DeveloperPackage + return DeveloperPackage.from_path(path) - A developer package may for example be a package.yaml or package.py in a - user's source directory. - Note: - The resulting package has a 'filepath' attribute added to it, that does - not normally appear on a `Package` object. A developer package is the - only case where we know we can directly associate a 'package.*' file - with a package - other packages can come from any kind of package repo, - which may or may not associate a single file with a single package (or - any file for that matter - it may come from a database). - - Args: - path: Directory containing the package definition file. - - Returns: - `Package` object. 
- """ - name = data = None - for name_ in config.plugins.package_repository.filesystem.package_filenames: - for format_ in (FileFormat.py, FileFormat.yaml): - filepath = os.path.join(path, "%s.%s" % (name_, format_.extension)) - if os.path.isfile(filepath): - data = load_from_file(filepath, format_) - break - if data: - name = data.get("name") - if name is not None or isinstance(name, basestring): - break - - if data is None: - raise PackageMetadataError("No package definition file found at %s" % path) - - if name is None or not isinstance(name, basestring): - raise PackageMetadataError( - "Error in %r - missing or non-string field 'name'" % filepath) - - package = create_package(name, data) - package = _postprocess_package(package, data) - - # graft on developer-package-specific attributes - setattr(package, "filepath", filepath) - - return package - - -def create_package(name, data): +def create_package(name, data, package_cls=None): """Create a package given package data. Args: @@ -515,7 +471,7 @@ def create_package(name, data): `Package` object. 
""" from rez.package_maker__ import PackageMaker - maker = PackageMaker(name, data) + maker = PackageMaker(name, data, package_cls=package_cls) return maker.get_package() @@ -636,48 +592,6 @@ def get_latest_package(name, range_=None, paths=None, error=False): return None -def _postprocess_package(package, data): - postprocess = getattr(package, "postprocess", None) - if not postprocess: - return package - - from rez.serialise import process_python_objects - from rez.utils.data_utils import get_dict_diff - from copy import deepcopy - - postprocessed_data = deepcopy(data) - - # apply postprocessing - postprocess.func(this=package, data=postprocessed_data) - - # if postprocess added functions, these need to be converted to - # SourceCode instances - postprocessed_data = process_python_objects(postprocessed_data) - - if postprocessed_data != data: - # recreate package from modified package data - package = create_package(package.name, postprocessed_data) - - # print summary of changed package attributes - added, removed, changed = get_dict_diff(data, postprocessed_data) - lines = ["Package attributes were changed in post processing:"] - - if added: - lines.append("Added attributes: %s" - % ['.'.join(x) for x in added]) - if removed: - lines.append("Removed attributes: %s" - % ['.'.join(x) for x in removed]) - if changed: - lines.append("Changed attributes: %s" - % ['.'.join(x) for x in changed]) - - txt = '\n'.join(lines) - print_info(txt) - - return package - - def _get_families(name, paths=None): entries = [] for path in (paths or config.packages_path): diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 41300fdde..5730d6a3b 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1500,6 +1500,7 @@ def _minor_heading(txt): executor.bind('root', pkg.root) executor.bind('base', pkg.base) + """ # show a meaningful filename in traceback if an error occurs. 
# Can't use an actual filepath because (a) the package may not # have one and (b) it might be a yaml file (line numbers would @@ -1521,6 +1522,14 @@ def _minor_heading(txt): except error_class as e: exc = e trace = traceback.format_exc() + """ + + exc = None + trace = None + commands.set_package(pkg) + + try: + executor.execute_code(commands) except error_class as e: exc = e trace = traceback.format_exc() diff --git a/src/rez/rex.py b/src/rez/rex.py index ca8138681..9cfec0dc2 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -10,6 +10,7 @@ from rez.config import config from rez.exceptions import RexError, RexUndefinedVariableError, RezSystemError from rez.util import shlex_join +from rez.utils.sourcecode import SourceCode from rez.utils.data_utils import AttrDictWrapper from rez.utils.formatting import expandvars from rez.vendor.enum import Enum @@ -1119,7 +1120,7 @@ def compile_code(cls, code, filename=None, exec_namespace=None): """Compile and possibly execute rex code. Args: - code (str): The python code to compile. + code (str or SourceCode): The python code to compile. filename (str): File to associate with the code, will default to ''. namespace (dict): Namespace to execute the code in. 
If None, the @@ -1133,7 +1134,10 @@ def compile_code(cls, code, filename=None, exec_namespace=None): # compile try: - pyc = compile(code, filename, 'exec') + if isinstance(code, SourceCode): + pyc = code.compiled + else: + pyc = compile(code, filename, 'exec') except error_class as e: # trim trace down to only what's interesting msg = str(e) @@ -1152,7 +1156,10 @@ def compile_code(cls, code, filename=None, exec_namespace=None): # execute if exec_namespace is not None: try: - exec pyc in exec_namespace + if isinstance(code, SourceCode): + code.exec_(globals_=exec_namespace) + else: + exec pyc in exec_namespace except error_class as e: # trim trace down to only what's interesting import traceback @@ -1160,13 +1167,14 @@ def compile_code(cls, code, filename=None, exec_namespace=None): frames = [x for x in frames if x[0] == filename] cls._patch_frames(frames, code, filename) cls._raise_rex_error(frames, e) + return pyc def execute_code(self, code, filename=None): """Execute code within the execution context. Args: - code (str): Rex code to execute. + code (str or SourceCode): Rex code to execute. filename (str): Filename to report if there are syntax errors. """ self.compile_code(code=code, diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index 078fe5d75..d890582f1 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -58,7 +58,6 @@ # is highly recommended that this be set to local storage, such as /tmp. tmpdir = None - # Where temporary files for contexts go. Defaults to appropriate path depending # on your system - for example, *nix distributions will probably set this to "/tmp". # This is separate to 'tmpdir' because you sometimes might want to set this to an @@ -67,6 +66,45 @@ # cleaned up when the render completes. context_tmpdir = None +# These are extra python paths that are added to sys.path **only during a build**. 
+# This means that any of the functions in the following list can import modules +# from these paths: +# * The *postprocess* function; +# * Any function decorated with @harden (TODO) - these get evaluated at build time. +# +# You can use this to provide common code to your package definition files during +# a build. To provide common code for packages to use at resolve time instead (for +# example, in a *commands* function) see the following +# *package_definition_python_path* setting. +# +package_definition_build_python_paths = [] + +# This is the directory from which installed packages can import modules. This +# is a way for packages to use shared code. +# +# This is NOT a standard path added to sys.path. Packages that use modules from +# within this directory need to explicitly name them. Furthermore, modules that +# a package uses are copied into that package's install - this ensures that the +# package remains standalone and that changes to the shared code will not break +# or alter existing package installs. +# +# Consider the setting: +# +# package_definition_python_path = "/src/rezutils" +# +# Consider also the following package *commands* function: +# +# @include("utils") +# def commands(): +# utils.do_some_common_thing(this) +# +# This package will import the code from */src/rezutils/utils.py* (or more +# specifically, its copy of this sourcefile) and will bind it to the name *utils*. +# +# For further information, see [here](Package-Definition-Guide#using-shared-code). +# +package_definition_python_path = None + ############################################################################### # Extensions @@ -319,6 +357,39 @@ # scripts (such as .bashrc). If False, package commands are sourced after. package_commands_sourced_first = True +# If you define this function, it will be called as the *postprocess function* +# on every package that does not provide its own, as part of the build process. 
+# The given function must be made available by setting the value of
+# *package_definition_build_python_paths* appropriately.
+#
+# For example, consider the settings:
+#
+#     package_definition_build_python_paths = ["/src/rezutils"]
+#     package_postprocess_function = "build.validate"
+#
+# This would use the 'validate' function in the sourcefile /src/rezutils/build.py
+# to postprocess every package definition file that does not define its own
+# postprocess function.
+#
+# If the postprocess function raises an exception, an error message is printed,
+# and the postprocessing is not applied to the package. However, if the
+# *InvalidPackageError* exception is raised, the build is aborted.
+#
+# You would typically use this to perform common validation or modification of
+# packages. For example, your common postprocess function might check that the
+# package name matches a regex. Here's what that might look like:
+#
+#     # in /src/rezutils/build.py
+#     import re
+#     from rez.exceptions import InvalidPackageError
+#
+#     def validate(this, data):
+#         regex = re.compile("[a-zA-Z_]+$")
+#         if not regex.match(this.name):
+#             raise InvalidPackageError("Invalid package name.")
+#
+package_postprocess_function = None
+
 ###############################################################################
 # Debugging
diff --git a/src/rez/serialise.py b/src/rez/serialise.py
index b70210c06..7383362f9 100644
--- a/src/rez/serialise.py
+++ b/src/rez/serialise.py
@@ -2,11 +2,12 @@
 Read and write data from file. File caching via a memcached server is
 supported.
""" from rez.utils.scope import ScopeContext -from rez.utils.data_utils import SourceCode +from rez.utils.sourcecode import SourceCode, early, include from rez.utils.logging_ import print_debug from rez.utils.filesystem import TempDirs -from rez.exceptions import ResourceError +from rez.exceptions import ResourceError, InvalidPackageError from rez.utils.memcached import memcached +from rez.utils.syspath import add_sys_paths from rez.config import config from rez.vendor.enum import Enum from rez.vendor import yaml @@ -133,7 +134,11 @@ def load_py(stream, filepath=None): dict. """ scopes = ScopeContext() - g = dict(scope=scopes) + + g = dict(scope=scopes, + early=early, + include=include, + InvalidPackageError=InvalidPackageError) try: exec stream in g @@ -143,30 +148,39 @@ def load_py(stream, filepath=None): while filepath and frames and frames[0][0] != filepath: frames = frames[1:] - msg = str(e) + msg = "Problem loading %s: %s" % (filepath, str(e)) stack = ''.join(traceback.format_list(frames)).strip() if stack: msg += ":\n" + stack raise ResourceError(msg) result = {} - excludes = set(('scope', '__builtins__')) + excludes = set(('scope', 'InvalidPackageError', '__builtins__', + 'early', 'include')) + for k, v in g.iteritems(): if k not in excludes and \ (k not in __builtins__ or __builtins__[k] != v): result[k] = v result.update(scopes.to_dict()) - result = process_python_objects(result) + result = process_python_objects(result, filepath=filepath) return result -def process_python_objects(data): +def process_python_objects(data, filepath=None): for k, v in data.iteritems(): if isfunction(v): - data[k] = SourceCode.from_function(v) + if hasattr(v, "_early"): + # run the function now, and replace with return value + with add_sys_paths(config.package_definition_build_python_paths): + value = v() + else: + value = SourceCode.from_function(v, filepath=filepath) + + data[k] = value elif isinstance(v, dict): - process_python_objects(v) + process_python_objects(v, 
filepath=filepath) return data diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index eca87ef0f..836bd39ee 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -7,7 +7,7 @@ from rez.package_repository import create_memory_package_repository from rez.tests.util import TestBase, TempdirMixin from rez.utils.formatting import PackageRequest -from rez.utils.data_utils import SourceCode +from rez.utils.sourcecode import SourceCode import rez.vendor.unittest2 as unittest from rez.vendor.version.version import Version import os.path diff --git a/src/rez/util.py b/src/rez/util.py index 65ba89d81..5a9e0a1f1 100644 --- a/src/rez/util.py +++ b/src/rez/util.py @@ -45,7 +45,7 @@ def create_executable_script(filepath, body, program=None): """ program = program or "python" if callable(body): - from rez.utils.data_utils import SourceCode + from rez.utils.sourcecode import SourceCode code = SourceCode.from_function(body) body = code.source diff --git a/src/rez/utils/data_utils.py b/src/rez/utils/data_utils.py index 9ef0257c6..da4132ea8 100644 --- a/src/rez/utils/data_utils.py +++ b/src/rez/utils/data_utils.py @@ -4,9 +4,7 @@ from rez.vendor.schema.schema import Schema, Optional from rez.exceptions import RexError from collections import MutableMapping -from inspect import getsourcelines, getargspec from threading import Lock -from textwrap import dedent class _Missing: pass @@ -54,60 +52,6 @@ def _diff(d1_, d2_, namespace): return _diff(d1, d2, []) -class SourceCode(object): - """Very simple wrapper for python source code.""" - def __init__(self, source, func=None): - self.source = source.rstrip() - self.func = func - - @classmethod - def from_function(cls, func): - # get txt of function body - loc = getsourcelines(func)[0][1:] - code = dedent(''.join(loc)) - - # align lines that start with a comment (#) - codelines = code.split('\n') - linescount = len(codelines) - for i, line in enumerate(codelines): - if 
line.startswith('#'): - nextindex = i+1 if i < linescount else i-1 - nextline = codelines[nextindex] - while nextline.startswith('#'): - nextline = codelines[nextindex] - nextindex = nextindex+1 if nextindex < linescount else nextindex-1 - firstchar = len(nextline)-len(nextline.lstrip()) - codelines[i] = '%s%s' % (nextline[:firstchar], line) - - code = '\n'.join(codelines).rstrip() - code = dedent(code) - - value = SourceCode.__new__(SourceCode) - value.source = code - value.func = func - return value - - def corrected_for_indent(self): - if self.source and self.source[0] in (' ', '\t'): - new_source = "if True:\n" + self.source - return SourceCode(new_source) - else: - return self - - def __eq__(self, other): - return (isinstance(other, SourceCode) - and other.source == self.source) - - def __ne__(self, other): - return not (other == self) - - def __str__(self): - return self.source - - def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self.source) - - class cached_property(object): """Simple property caching descriptor. 
diff --git a/src/rez/utils/filesystem.py b/src/rez/utils/filesystem.py
index 66cd10054..082914641 100644
--- a/src/rez/utils/filesystem.py
+++ b/src/rez/utils/filesystem.py
@@ -54,7 +54,7 @@ def clear(self):
         self.dirs = set()
 
         for path in dirs:
-            if os.path.exists(path):
+            if os.path.exists(path) and not os.getenv("REZ_KEEP_TMPDIRS"):
                 shutil.rmtree(path)
 
     @classmethod
@@ -82,6 +82,17 @@ def retain_cwd():
         os.chdir(cwd)
 
 
+def safe_makedirs(path):
+    # makedirs that takes into account that multiple threads may try to make
+    # the same dir at the same time
+    if not os.path.exists(path):
+        try:
+            os.makedirs(path)
+        except OSError:
+            if not os.path.exists(path):
+                raise
+
+
 def is_subdirectory(path_a, path_b):
     """Returns True if `path_a` is a subdirectory of `path_b`."""
     path_a = os.path.realpath(path_a)
diff --git a/src/rez/utils/sourcecode.py b/src/rez/utils/sourcecode.py
new file mode 100644
index 000000000..6dd6ee5d1
--- /dev/null
+++ b/src/rez/utils/sourcecode.py
@@ -0,0 +1,238 @@
+from rez.utils.formatting import indent
+from rez.utils.logging_ import print_debug
+from inspect import getsourcelines
+from textwrap import dedent
+from glob import glob
+import os.path
+import imp
+
+
+def early():
+    """Used by functions in package.py to harden to the return value at build time.
+
+    The term 'early' refers to the fact these package attributes are bound 'early',
+    as opposed to 'late' bindings, which evaluate lazily.
+    """
+    def decorated(fn):
+        setattr(fn, "_early", True)
+        return fn
+
+    return decorated
+
+
+def include(module_name, *module_names):
+    """Used by functions in package.py to have access to named modules.
+
+    See the 'package_definition_python_path' config setting for more info.
+ """ + def decorated(fn): + _add_decorator(fn, "include", nargs=[module_name] + list(module_names)) + return fn + + return decorated + + +def _add_decorator(fn, name, **kwargs): + if not hasattr(fn, "_decorators"): + setattr(fn, "_decorators", []) + + kwargs.update({"name": name}) + fn._decorators.append(kwargs) + + +class SourceCode(object): + """Wrapper for python source code. + + This object is aware of the decorators defined in this sourcefile (such as + 'include') and deals with them appropriately. + """ + def __init__(self, source, func=None, filepath=None): + self.source = source.rstrip() + self.func = func + self.filepath = filepath + self.package = None + self.pyc = None + + def copy(self): + other = SourceCode(source=self.source, func=self.func) + return other + + @classmethod + def from_function(cls, func, filepath=None): + # get txt of function body. Skips sig and any decorators. Assumes that + # only the decorators in this file (such as 'include') are used. + num_decorators = len(getattr(func, "_decorators", [])) + loc = getsourcelines(func)[0][num_decorators + 1:] + code = dedent(''.join(loc)) + + # align lines that start with a comment (#) + codelines = code.split('\n') + linescount = len(codelines) + + for i, line in enumerate(codelines): + if line.startswith('#'): + nextindex = i + 1 if i < linescount else i - 1 + nextline = codelines[nextindex] + + while nextline.startswith('#'): + nextline = codelines[nextindex] + nextindex = (nextindex + 1 if nextindex < linescount + else nextindex - 1) + + firstchar = len(nextline) - len(nextline.lstrip()) + codelines[i] = '%s%s' % (nextline[:firstchar], line) + + code = '\n'.join(codelines).rstrip() + code = dedent(code) + + value = SourceCode.__new__(SourceCode) + value.source = code + value.func = func + value.filepath = filepath + value.package = None + value.pyc = None + + return value + + @property + def compiled(self): + if self.pyc is not None: + return self.pyc + + code = self.source + if code and 
code[0] in (' ', '\t'): + code = "if True:\n" + code + + if self.filepath: + filename = self.filepath + else: + filename = "" + + if self.func: + filename += ":%s" % self.func.__name__ + + self.pyc = compile(code, filename, 'exec') + return self.pyc + + def set_package(self, package): + # this is needed to load @included modules + self.package = package + + def exec_(self, globals_): + # bind import modules + if self.package is not None: + module_names = self.get_includes() + if module_names: + globals_ = globals_.copy() + + for name in module_names: + module = include_module_manager.load_module(name, self.package) + globals_[name] = module + + # exec + pyc = self.pyc + exec pyc in globals_ + + def to_text(self, funcname): + # don't indent code if already indented + if self.source[0] in (' ', '\t'): + source = self.source + else: + source = indent(self.source) + + txt = "def %s():\n%s" % (funcname, source) + + if self.func and hasattr(self.func, "_decorators"): + for entry in self.func._decorators: + nargs_str = ", ".join(map(repr, entry.get("nargs", []))) + name_str = entry.get("name") + sig = "@%s(%s)" % (name_str, nargs_str) + + txt = sig + '\n' + txt + + return txt + + def get_includes(self): + info = self._get_decorator_info("include") + if not info: + return None + + return set(info.get("nargs", [])) + + def _get_decorator_info(self, name): + if not self.func: + return None + + if not hasattr(self.func, "_decorators"): + return None + + matches = [x for x in self.func._decorators if x.get("name") == name] + if not matches: + return None + + return matches[0] + + """ + def corrected_for_indent(self): + if self.source and self.source[0] in (' ', '\t'): + new_source = "if True:\n" + self.source + return SourceCode(new_source) + else: + return self + """ + + def __eq__(self, other): + return (isinstance(other, SourceCode) + and other.source == self.source) + + def __ne__(self, other): + return not (other == self) + + def __str__(self): + return self.source + + 
def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self.source) + + +class IncludeModuleManager(object): + """Manages a cache of modules imported via '@include' decorator. + """ + + # subdirectory under package 'base' path where we expect to find copied + # sourcefiles referred to by the '@include' function decorator. + # + include_modules_subpath = ".rez/include" + + def __init__(self): + self.modules = {} + + def load_module(self, name, package): + from rez.config import config # avoiding circular import + + path = os.path.join(package.base, self.include_modules_subpath) + pathname = os.path.join(path, "%s-*.py" % name) + + pathnames = glob(pathname) + if not pathnames: + return None + + filepath = pathnames[0] + hash_str = filepath.rsplit('-', 1)[-1].split('.', 1)[0] + + module = self.modules.get(hash_str) + if module is not None: + return module + + if config.debug("file_loads"): + print_debug("Loading include sourcefile: %s" % filepath) + + with open(filepath) as f: + module = imp.load_source(name, filepath, f) + + self.modules[hash_str] = module + return module + + +# singleton +include_module_manager = IncludeModuleManager() diff --git a/src/rez/utils/syspath.py b/src/rez/utils/syspath.py new file mode 100644 index 000000000..88f00a6bd --- /dev/null +++ b/src/rez/utils/syspath.py @@ -0,0 +1,15 @@ +from contextlib import contextmanager +import sys + + +@contextmanager +def add_sys_paths(paths): + """Add to sys.path, and revert on scope exit. 
+ """ + original_syspath = sys.path[:] + sys.path.extend(paths) + + try: + yield + finally: + sys.path = original_syspath diff --git a/src/rez/utils/yaml.py b/src/rez/utils/yaml.py index 36e27ea34..b34277cf2 100644 --- a/src/rez/utils/yaml.py +++ b/src/rez/utils/yaml.py @@ -1,4 +1,4 @@ -from rez.utils.data_utils import SourceCode +from rez.utils.sourcecode import SourceCode from rez.vendor import yaml from rez.vendor.yaml.dumper import SafeDumper from rez.vendor.yaml.nodes import ScalarNode, MappingNode diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index c44a317c9..57793077b 100644 --- a/src/rezplugins/build_process/local.py +++ b/src/rezplugins/build_process/local.py @@ -1,13 +1,18 @@ """ Builds packages on local host """ +from rez.config import config from rez.package_repository import package_repository_manager from rez.build_process_ import BuildProcessHelper, BuildType from rez.release_hook import ReleaseHookEvent from rez.exceptions import BuildError, ReleaseError from rez.utils.colorize import Printer, warning +from rez.utils.filesystem import safe_makedirs +from rez.utils.sourcecode import IncludeModuleManager +from hashlib import sha1 import shutil import os +import os.path class LocalBuildProcess(BuildProcessHelper): @@ -30,6 +35,10 @@ def build(self, install_path=None, clean=False, install=False, variants=None): clean=clean, install=install) + # install include modules, if any + if install: + self._install_include_modules(install_path) + if None not in build_env_scripts: self._print("\nThe following executable script(s) have been created:") self._print('\n'.join(build_env_scripts)) @@ -109,11 +118,11 @@ def _build_variant_base(self, variant, build_type, install_path=None, # create directories (build, install) if clean and os.path.exists(variant_build_path): shutil.rmtree(variant_build_path) - if not os.path.exists(variant_build_path): - os.makedirs(variant_build_path) - if install and not 
os.path.exists(variant_install_path): -            os.makedirs(variant_install_path) +        safe_makedirs(variant_build_path) + +        if install: +            safe_makedirs(variant_install_path) # create build environment context, rxt_filepath = self.create_build_context( @@ -124,6 +133,7 @@ def _build_variant_base(self, variant, build_type, install_path=None, # run build system build_system_name = self.build_system.name() self._print("\nInvoking %s build system...", build_system_name) + build_result = self.build_system.build( context=context, variant=variant, @@ -143,6 +153,30 @@ def _build_variant_base(self, variant, build_type, install_path=None, return build_result + def _install_include_modules(self, install_path): + # install 'include' sourcefiles, used by funcs decorated with @include + if not self.package.includes: + return + + install_path = install_path or self.package.config.local_packages_path + base_path = self.get_package_install_path(install_path) + + path = os.path.join(base_path, IncludeModuleManager.include_modules_subpath) + safe_makedirs(path) + + definition_python_path = self.package.config.package_definition_python_path + + for name in self.package.includes: + filepath = os.path.join(definition_python_path, name) + ".py" + + with open(filepath) as f: + txt = f.read().strip() + + uuid = sha1(txt).hexdigest() + dest_filepath = os.path.join(path, "%s-%s.py" % (name, uuid)) + + shutil.copy(filepath, dest_filepath) + def _build_variant(self, variant, install_path=None, clean=False, install=False, **kwargs): if variant.index is not None: diff --git a/src/support/package_utils/README b/src/support/package_utils/README new file mode 100644 index 000000000..5728c856a --- /dev/null +++ b/src/support/package_utils/README @@ -0,0 +1,4 @@ + +This directory contains code that you might want to use for package.py +postprocessing, or import/include in build/runtime functions in your package +definitions. 
diff --git a/src/support/package_utils/get_committers.sh b/src/support/package_utils/get_committers.sh new file mode 100644 index 000000000..178056507 --- /dev/null +++ b/src/support/package_utils/get_committers.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Prints a list of users who have committed to the repo in cwd, in decreasing +# order of commit count. +# +# Supports: +# * git +# +set -e + +# git +git status &> /dev/null +if [ $? -eq 0 ]; then + git shortlog -sn | cut -f2 + exit 0 +fi diff --git a/src/support/package_utils/utils.py b/src/support/package_utils/utils.py new file mode 100644 index 000000000..f79c494ad --- /dev/null +++ b/src/support/package_utils/utils.py @@ -0,0 +1,21 @@ +import os.path +import subprocess + + +def set_authors(data): + """Add 'authors' attribute based on repo contributions + """ + if "authors" in data: + return + + shfile = os.path.join(os.path.dirname(__file__), "get_committers.sh") + + p = subprocess.Popen(["bash", shfile], stdout=subprocess.PIPE) + out, _ = p.communicate() + if p.returncode: + return + + authors = out.strip().split('\n') + authors = [x.strip() for x in authors] + + data["authors"] = authors From 7a94313ae4f62d3514c54dd1bfcc05a3e8060c38 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 18 Feb 2017 08:11:35 +1100 Subject: [PATCH 031/124] -WIP, late binding almost done --- src/rez/developer_package.py | 5 +- src/rez/package_maker__.py | 5 +- src/rez/package_resources_.py | 11 ++- src/rez/package_serialise.py | 4 +- src/rez/packages_.py | 48 +++++++++++- src/rez/resolved_context.py | 7 +- src/rez/rex.py | 32 +++++--- src/rez/rezconfig.py | 2 +- src/rez/serialise.py | 14 +++- src/rez/utils/data_utils.py | 11 ++- src/rez/utils/sourcecode.py | 133 +++++++++++++++++++++++++--------- 11 files changed, 210 insertions(+), 62 deletions(-) diff --git a/src/rez/developer_package.py b/src/rez/developer_package.py index db50ce776..fd9e1d581 100644 --- a/src/rez/developer_package.py +++ b/src/rez/developer_package.py @@ -44,8 +44,7 @@ 
def from_path(cls, path): filepath = os.path.join(path, "%s.%s" % (name_, format_.extension)) if os.path.isfile(filepath): - with add_sys_paths(config.package_definition_build_python_paths): - data = load_from_file(filepath, format_) + data = load_from_file(filepath, format_) break if data: name = data.get("name") @@ -76,7 +75,7 @@ def from_path(cls, path): def visit(d): for k, v in d.iteritems(): if isinstance(v, SourceCode): - package.includes |= (v.get_includes() or set()) + package.includes |= (v.includes or set()) elif isinstance(v, dict): visit(v) diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index daac7da69..df2ad05eb 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -3,7 +3,8 @@ from rez.utils.formatting import PackageRequest from rez.utils.data_utils import AttrDictWrapper from rez.utils.logging_ import print_warning -from rez.package_resources_ import help_schema, _commands_schema, _function_schema +from rez.package_resources_ import help_schema, _commands_schema, \ + _function_schema, late_bound from rez.package_repository import create_memory_package_repository from rez.packages_ import Package from rez.vendor.schema.schema import Schema, Optional, Or, Use, And @@ -31,7 +32,7 @@ Optional('uuid'): basestring, Optional('config'): dict, - Optional('tools'): [basestring], + Optional('tools'): late_bound([basestring]), Optional('help'): help_schema, Optional('pre_commands'): _commands_schema, diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index b7e54ece3..d6d8502ea 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -37,6 +37,11 @@ help_schema = Or(basestring, # single help entry [[basestring]]) # multiple help entries +_is_late = And(SourceCode, lambda x: hasattr(x, "_late")) + +def late_bound(schema): + return Or(SourceCode, schema) + #------------------------------------------------------------------------------ # schema dicts @@ -73,7 +78,7 @@ # 
general Optional('uuid'): basestring, Optional('config'): Config, - Optional('tools'): [basestring], + Optional('tools'): late_bound([basestring]), Optional('help'): help_schema, # commands @@ -133,10 +138,8 @@ _package_request_schema = And(basestring, Use(PackageRequest)) - package_pod_schema_dict = base_resource_schema_dict.copy() - large_string_dict = And(basestring, Use(lambda x: dedent(x).strip())) @@ -157,7 +160,7 @@ Optional('uuid'): basestring, Optional('config'): And(dict, Use(lambda x: create_config(overrides=x))), - Optional('tools'): [basestring], + Optional('tools'): late_bound([basestring]), Optional('help'): help_schema, Optional('pre_commands'): _commands_schema, diff --git a/src/rez/package_serialise.py b/src/rez/package_serialise.py index 316105294..b8ecd5d5b 100644 --- a/src/rez/package_serialise.py +++ b/src/rez/package_serialise.py @@ -1,6 +1,6 @@ from rez.vendor import yaml from rez.serialise import FileFormat -from rez.package_resources_ import help_schema +from rez.package_resources_ import help_schema, late_bound from rez.vendor.schema.schema import Schema, Optional, And, Or, Use from rez.vendor.version.version import Version from rez.utils.sourcecode import SourceCode @@ -54,7 +54,7 @@ Optional("version"): version_schema, Optional("description"): basestring, Optional("authors"): [basestring], - Optional("tools"): [basestring], + Optional("tools"): late_bound([basestring]), Optional('requires'): [package_request_schema], Optional('build_requires'): [package_request_schema], diff --git a/src/rez/packages_.py b/src/rez/packages_.py index d9a4a58d3..9a37eb477 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -4,7 +4,8 @@ package_release_keys from rez.package_serialise import dump_package_data from rez.utils.logging_ import print_info, print_error -from rez.utils.data_utils import cached_property +from rez.utils.sourcecode import SourceCode +from rez.utils.data_utils import cached_property, _missing from rez.utils.formatting 
import StringFormatMixin, StringFormatType from rez.utils.filesystem import is_subdirectory from rez.utils.schema import schema_keys @@ -59,6 +60,10 @@ def iter_packages(self): class PackageBaseResourceWrapper(PackageRepositoryResourceWrapper): """Abstract base class for `Package` and `Variant`. """ + def __init__(self, *nargs, **kwargs): + super(PackageBaseResourceWrapper, self).__init__(*nargs, **kwargs) + self._late_bindings = {} + def arbitrary_keys(self): raise NotImplementedError @@ -114,6 +119,29 @@ def print_info(self, buf=None, format_=FileFormat.yaml, dump_package_data(data, buf=buf, format_=format_, skip_attributes=skip_attributes) + def _wrap_forwarded(self, key, value): + if isinstance(value, SourceCode) and value.late_binding: + value_ = self._late_bindings.get(key, _missing) + + if value_ is _missing: + value_ = self._eval_late_binding(value) + self._late_bindings[key] = value_ + + return value_ + else: + return value + + def _eval_late_binding(self, sourcecode, globals_={}): + g = { + "this": self, + "in_context": lambda: False + } + + g.update(globals_) + + sourcecode.set_package(self) + return sourcecode.exec_(globals_=g) + class Package(PackageBaseResourceWrapper): """A package. @@ -204,6 +232,10 @@ class Variant(PackageBaseResourceWrapper): def __init__(self, resource): _check_class(resource, VariantResource) super(Variant, self).__init__(resource) + self.context = None + + def set_context(self, context): + self.context = context # arbitrary keys def __getattr__(self, name): @@ -290,6 +322,17 @@ def install(self, path, dry_run=False, overrides=None): else: return Variant(resource) + def _eval_late_binding(self, sourcecode): + g = {} + + if self.context is None: + g["in_context"] = lambda: False + else: + g["in_context"] = lambda: True + g["context"] = self.context + + return super(Variant, self)._eval_late_binding(sourcecode, globals_=g) + class PackageSearchPath(object): """A list of package repositories. 
@@ -482,12 +525,15 @@ def get_variant(variant_handle): variant_handle (`ResourceHandle` or dict): Resource handle, or equivalent serialized dict representation from ResourceHandle.to_dict + context (`ResolvedContext`): The context this variant is associated + with, if any. Returns: `Variant`. """ if isinstance(variant_handle, dict): variant_handle = ResourceHandle.from_dict(variant_handle) + variant_resource = package_repository_manager.get_resource_from_handle(variant_handle) variant = Variant(variant_resource) return variant diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 5730d6a3b..a64c7df27 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -257,7 +257,11 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, self.from_cache = resolver.from_cache if self.status_ == ResolverStatus.solved: - self._resolved_packages = resolver.resolved_packages + self._resolved_packages = [] + + for variant in resolver.resolved_packages: + variant.set_context(self) + self._resolved_packages.append(variant) def __str__(self): request = self.requested_packages(include_implicit=True) @@ -1333,6 +1337,7 @@ def _print_version(value): variant_handle = convert_old_variant_handle(variant_handle) variant = get_variant(variant_handle) + variant.set_context(r) r._resolved_packages.append(variant) # -- SINCE SERIALIZE VERSION 1 diff --git a/src/rez/rex.py b/src/rez/rex.py index 9cfec0dc2..7e1c594d0 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -1129,6 +1129,16 @@ def compile_code(cls, code, filename=None, exec_namespace=None): Returns: Compiled code object. 
""" + if isinstance(code, SourceCode): + evaluated_code = code.evaluated_code + funcname = code.function_name + + if filename is None: + filename = code.sourcename + else: + evaluated_code = code + funcname = "" + filename = filename or "" error_class = Exception if config.catch_rex_errors else None @@ -1146,7 +1156,7 @@ def compile_code(cls, code, filename=None, exec_namespace=None): if match: try: lineno = int(match.groups()[0]) - loc = code.split('\n') + loc = evaluated_code.split('\n') line = loc[lineno - 1] msg += "\n %s" % line except: @@ -1165,8 +1175,8 @@ def compile_code(cls, code, filename=None, exec_namespace=None): import traceback frames = traceback.extract_tb(sys.exc_traceback) frames = [x for x in frames if x[0] == filename] - cls._patch_frames(frames, code, filename) - cls._raise_rex_error(frames, e) + cls._patch_frames(frames, evaluated_code, filename, funcname) + cls._raise_rex_error(frames, e, sourcename=filename) return pyc @@ -1206,7 +1216,7 @@ def execute_function(self, func, *nargs, **kwargs): filepath = inspect.getfile(func) if os.path.exists(filepath): frames = [x for x in frames if x[0] == filepath] - self._raise_rex_error(frames, e) + self._raise_rex_error(frames, e, sourcename=func.__name__) def get_output(self, style=OutputStyle.file): """Returns the result of all previous calls to execute_code.""" @@ -1216,7 +1226,7 @@ def expand(self, value): return self.formatter.format(str(value)) @classmethod - def _patch_frames(cls, frames, code, codefile=None): + def _patch_frames(cls, frames, code, codefile=None, funcname=None): """Patch traceback's frame objects to add lines of code from `code` where appropriate. 
""" @@ -1227,19 +1237,23 @@ def _patch_frames(cls, frames, code, codefile=None): if filename == codefile and line is None: try: line = loc[lineno - 1].strip() - frames[i] = (filename, lineno, "", line) + funcname = funcname or "" + frames[i] = (filename, lineno, funcname, line) except: pass @classmethod - def _raise_rex_error(cls, frames, e): + def _raise_rex_error(cls, frames, e, sourcename=None): import traceback + stack = ''.join(traceback.format_list(frames)).strip() + sourcename = sourcename or "rex code" + if isinstance(e, RexError): raise type(e)("%s\n%s" % (str(e), stack)) else: - raise RexError("Error in rex code: %s - %s\n%s" - % (e.__class__.__name__, str(e), stack)) + raise RexError("Error in %s: %s - %s\n%s" + % (sourcename, e.__class__.__name__, str(e), stack)) # Copyright 2013-2016 Allan Johns. diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index d890582f1..a0b07ace2 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -70,7 +70,7 @@ # This means that any of the functions in the following list can import modules # from these paths: # * The *postprocess* function; -# * Any function decorated with @harden (TODO) - these get evaluated at build time. +# * Any function decorated with @early - these get evaluated at build time. # # You can use this to provide common code to your package definition files during # a build. To provide common code for packages to use at resolve time instead (for diff --git a/src/rez/serialise.py b/src/rez/serialise.py index 7383362f9..f1d20bf9d 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -2,7 +2,7 @@ Read and write data from file. File caching via a memcached server is supported. 
""" from rez.utils.scope import ScopeContext -from rez.utils.sourcecode import SourceCode, early, include +from rez.utils.sourcecode import SourceCode, early, late, include from rez.utils.logging_ import print_debug from rez.utils.filesystem import TempDirs from rez.exceptions import ResourceError, InvalidPackageError @@ -137,6 +137,7 @@ def load_py(stream, filepath=None): g = dict(scope=scopes, early=early, + late=late, include=include, InvalidPackageError=InvalidPackageError) @@ -156,7 +157,7 @@ def load_py(stream, filepath=None): result = {} excludes = set(('scope', 'InvalidPackageError', '__builtins__', - 'early', 'include')) + 'early', 'late', 'include')) for k, v in g.iteritems(): if k not in excludes and \ @@ -176,6 +177,15 @@ def process_python_objects(data, filepath=None): with add_sys_paths(config.package_definition_build_python_paths): value = v() else: + """ + print + print 'XXXXXXXXXXXXXXXXXXXXXX', v + from inspect import getsourcelines + loc = getsourcelines(v)[0] + code = (''.join(loc)) + print code + print + """ value = SourceCode.from_function(v, filepath=filepath) data[k] = value diff --git a/src/rez/utils/data_utils.py b/src/rez/utils/data_utils.py index da4132ea8..a002578dd 100644 --- a/src/rez/utils/data_utils.py +++ b/src/rez/utils/data_utils.py @@ -307,6 +307,10 @@ class AttributeForwardMeta(type): forwarding is skipped for that attribute. If the wrapped object does not contain an attribute, the forwarded value will be None. + If the parent class contains method '_wrap_forwarded', then forwarded values + are passed to this function, and the return value becomes the attribute + value. + The class must contain: - keys (list of str): The attributes to be forwarded. 
@@ -352,7 +356,12 @@ def _defined(x): @classmethod def _make_forwarder(cls, key): def func(self): - return getattr(self.wrapped, key, None) + value = getattr(self.wrapped, key, None) + + if hasattr(self, "_wrap_forwarded"): + value = self._wrap_forwarded(key, value) + + return value return property(func) diff --git a/src/rez/utils/sourcecode.py b/src/rez/utils/sourcecode.py index 6dd6ee5d1..7841eb54e 100644 --- a/src/rez/utils/sourcecode.py +++ b/src/rez/utils/sourcecode.py @@ -1,8 +1,10 @@ from rez.utils.formatting import indent +from rez.utils.data_utils import cached_property from rez.utils.logging_ import print_debug from inspect import getsourcelines from textwrap import dedent from glob import glob +import traceback import os.path import imp @@ -10,8 +12,8 @@ def early(): """Used by functions in package.py to harden to the return value at build time. - The term 'early' refers to the fact these package attribute are boud 'early', - as opposed to 'late' bindings, which evaluate lazily. + The term 'early' refers to the fact these package attribute are evaluated + early, ie at build time and before a package is installed. """ def decorated(fn): setattr(fn, "_early", True) @@ -20,6 +22,24 @@ def decorated(fn): return decorated +def late(): + """Used by functions in package.py that are evaluated lazily. + + The term 'late' refers to the fact these package attributes are evaluated + late, ie when the attribute is queried for the first time. + + If you want to implement a package.py attribute as a function, you MUST use + this decorator - otherwise it is understood that you want your attribute to + be a function, not the return value of that function. + """ + def decorated(fn): + setattr(fn, "_late", True) + _add_decorator(fn, "late") + return fn + + return decorated + + def include(module_name, *module_names): """Used by functions in package.py to have access to named modules. 
@@ -40,6 +60,18 @@ def _add_decorator(fn, name, **kwargs): fn._decorators.append(kwargs) +class SourceCodeError(Exception): + pass + + +class SourceCodeCompileError(Exception): + pass + + +class SourceCodeExecError(Exception): + pass + + class SourceCode(object): """Wrapper for python source code. @@ -51,12 +83,13 @@ def __init__(self, source, func=None, filepath=None): self.func = func self.filepath = filepath self.package = None - self.pyc = None def copy(self): other = SourceCode(source=self.source, func=self.func) return other + # TODO this breaks in cases like: comment after decorator and before sig; + # sig broken over multiple lines, etc @classmethod def from_function(cls, func, filepath=None): # get txt of function body. Skips sig and any decorators. Assumes that @@ -90,48 +123,92 @@ def from_function(cls, func, filepath=None): value.func = func value.filepath = filepath value.package = None - value.pyc = None return value - @property - def compiled(self): - if self.pyc is not None: - return self.pyc + @cached_property + def includes(self): + info = self._get_decorator_info("include") + if not info: + return None + + return set(info.get("nargs", [])) + + @cached_property + def late_binding(self): + info = self._get_decorator_info("late") + return bool(info) + + @cached_property + def evaluated_code(self): + # turn into a function, because code may use return clause + if self.func: + funcname = self.func.__name__ + else: + funcname = "_unnamed" + + code = indent(self.source) + code = ("def %s():\n" % funcname + + code + + "\n_result = %s()" % funcname) - code = self.source - if code and code[0] in (' ', '\t'): - code = "if True:\n" + code + return code + @property + def sourcename(self): if self.filepath: filename = self.filepath else: - filename = "" + filename = "string" if self.func: filename += ":%s" % self.func.__name__ - self.pyc = compile(code, filename, 'exec') - return self.pyc + return "<%s>" % filename + + @property + def function_name(self): + if 
self.func: + return self.func.__name__ + else: + return None + + @cached_property + def compiled(self): + try: + pyc = compile(self.evaluated_code, self.sourcename, 'exec') + except Exception as e: + stack = traceback.format_exc() + raise SourceCodeCompileError( + "Failed to compile %s:\n\n%s" % (self.sourcename, stack)) + + return pyc def set_package(self, package): # this is needed to load @included modules self.package = package - def exec_(self, globals_): + def exec_(self, globals_={}): # bind import modules if self.package is not None: - module_names = self.get_includes() - if module_names: + if self.includes: globals_ = globals_.copy() - for name in module_names: + for name in self.includes: module = include_module_manager.load_module(name, self.package) globals_[name] = module # exec - pyc = self.pyc - exec pyc in globals_ + pyc = self.compiled + + try: + exec pyc in globals_ + except Exception as e: + stack = traceback.format_exc() + raise SourceCodeExecError( + "Failed to execute %s:\n\n%s" % (self.sourcename, stack)) + + return globals_.get("_result") def to_text(self, funcname): # don't indent code if already indented @@ -152,13 +229,6 @@ def to_text(self, funcname): return txt - def get_includes(self): - info = self._get_decorator_info("include") - if not info: - return None - - return set(info.get("nargs", [])) - def _get_decorator_info(self, name): if not self.func: return None @@ -172,15 +242,6 @@ def _get_decorator_info(self, name): return matches[0] - """ - def corrected_for_indent(self): - if self.source and self.source[0] in (' ', '\t'): - new_source = "if True:\n" + self.source - return SourceCode(new_source) - else: - return self - """ - def __eq__(self, other): return (isinstance(other, SourceCode) and other.source == self.source) From affe1117bc000e932266577ae5d8e3d3ee59e923 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 21 Feb 2017 14:54:12 +1100 Subject: [PATCH 032/124] -first pass @late() support complete -requires() works, required 
more changes than expected --- src/rez/package_maker__.py | 10 ++- src/rez/package_resources_.py | 57 +++++++++----- src/rez/package_serialise.py | 9 ++- src/rez/packages_.py | 88 +++++++++++++-------- src/rez/resolved_context.py | 75 ++++++++++-------- src/rez/resolver.py | 13 +++- src/rez/rex.py | 91 +++++----------------- src/rez/rex_bindings.py | 6 ++ src/rez/serialise.py | 25 +++--- src/rez/solver.py | 12 ++- src/rez/util.py | 2 +- src/rez/utils/__init__.py | 11 +++ src/rez/utils/data_utils.py | 1 + src/rez/utils/memcached.py | 2 +- src/rez/utils/sourcecode.py | 141 ++++++++++++++++++++-------------- 15 files changed, 300 insertions(+), 243 deletions(-) diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index df2ad05eb..32280f13d 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -25,15 +25,17 @@ Optional('description'): basestring, Optional('authors'): [basestring], - Optional('requires'): [package_request_schema], - Optional('build_requires'): [package_request_schema], - Optional('private_build_requires'): [package_request_schema], + Optional('requires'): late_bound([package_request_schema]), + Optional('build_requires'): late_bound([package_request_schema]), + Optional('private_build_requires'): late_bound([package_request_schema]), + + # deliberately not possible to late bind Optional('variants'): [[package_request_schema]], Optional('uuid'): basestring, Optional('config'): dict, Optional('tools'): late_bound([basestring]), - Optional('help'): help_schema, + Optional('help'): late_bound(help_schema), Optional('pre_commands'): _commands_schema, Optional('commands'): _commands_schema, diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index d6d8502ea..44ea1a194 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -29,6 +29,13 @@ "postprocess", ) +# package attributes that are rex-based functions +package_rex_keys = ( + "pre_commands", + "commands", + 
"post_commands" +) + #------------------------------------------------------------------------------ # utility schemas @@ -42,6 +49,11 @@ def late_bound(schema): return Or(SourceCode, schema) +# used when 'requires' is late bound +late_requires_schema = Schema([ + Or(PackageRequest, And(basestring, Use(PackageRequest))) +]) + #------------------------------------------------------------------------------ # schema dicts @@ -67,19 +79,19 @@ def late_bound(schema): Optional('authors'): [basestring], # dependencies - Optional('requires'): [PackageRequest], - Optional('build_requires'): [PackageRequest], - Optional('private_build_requires'): [PackageRequest], + Optional('requires'): late_bound([PackageRequest]), + Optional('build_requires'): late_bound([PackageRequest]), + Optional('private_build_requires'): late_bound([PackageRequest]), # plugins - Optional('has_plugins'): bool, - Optional('plugin_for'): [basestring], + Optional('has_plugins'): late_bound(bool), + Optional('plugin_for'): late_bound([basestring]), # general Optional('uuid'): basestring, Optional('config'): Config, Optional('tools'): late_bound([basestring]), - Optional('help'): help_schema, + Optional('help'): late_bound(help_schema), # commands Optional('pre_commands'): SourceCode, @@ -96,14 +108,16 @@ def late_bound(schema): Optional('vcs'): basestring, # arbitrary fields - Optional(basestring): object + Optional(basestring): late_bound(object) }) # package package_schema_dict = package_base_schema_dict.copy() package_schema_dict.update({ + # deliberately not possible to late bind Optional("variants"): [[PackageRequest]], + Optional("postprocess"): SourceCode }) @@ -149,19 +163,21 @@ def late_bound(schema): Optional('description'): large_string_dict, Optional('authors'): [basestring], - Optional('requires'): [_package_request_schema], - Optional('build_requires'): [_package_request_schema], - Optional('private_build_requires'): [_package_request_schema], + Optional('requires'): 
late_bound([_package_request_schema]), + Optional('build_requires'): late_bound([_package_request_schema]), + Optional('private_build_requires'): late_bound([_package_request_schema]), + + # deliberately not possible to late bind Optional('variants'): [[_package_request_schema]], - Optional('has_plugins'): bool, - Optional('plugin_for'): [basestring], + Optional('has_plugins'): late_bound(bool), + Optional('plugin_for'): late_bound([basestring]), Optional('uuid'): basestring, Optional('config'): And(dict, Use(lambda x: create_config(overrides=x))), Optional('tools'): late_bound([basestring]), - Optional('help'): help_schema, + Optional('help'): late_bound(help_schema), Optional('pre_commands'): _commands_schema, Optional('commands'): _commands_schema, @@ -178,7 +194,7 @@ def late_bound(schema): Optional('vcs'): basestring, # arbitrary keys - Optional(basestring): object + Optional(basestring): late_bound(object) }) @@ -352,9 +368,9 @@ def _convert_to_rex(self, commands): commands = convert_old_commands(commands) if isinstance(commands, basestring): - return SourceCode(commands) + return SourceCode(source=commands) elif callable(commands): - return SourceCode.from_function(commands) + return SourceCode(func=commands) else: return commands @@ -402,10 +418,10 @@ def _root(self): root = os.path.join(self.base, self.subpath) return root - @cached_property - def requires(self): - reqs = self.parent.requires or [] - return reqs + self.variant_requires + #@cached_property + #def requires(self): + # reqs = self.parent.requires or [] + # return reqs + self.variant_requires @cached_property def variant_requires(self): @@ -420,7 +436,6 @@ def variant_requires(self): "Unexpected error - variant %s cannot be found in its " "parent package %s" % (self.uri, self.parent.uri)) - @property def wrapped(self): # forward Package attributes onto ourself return self.parent diff --git a/src/rez/package_serialise.py b/src/rez/package_serialise.py index b8ecd5d5b..296ce23c3 100644 --- 
a/src/rez/package_serialise.py +++ b/src/rez/package_serialise.py @@ -56,16 +56,17 @@ Optional("authors"): [basestring], Optional("tools"): late_bound([basestring]), - Optional('requires'): [package_request_schema], - Optional('build_requires'): [package_request_schema], - Optional('private_build_requires'): [package_request_schema], + Optional('requires'): late_bound([package_request_schema]), + Optional('build_requires'): late_bound([package_request_schema]), + Optional('private_build_requires'): late_bound([package_request_schema]), + Optional('variants'): [[package_request_schema]], Optional('pre_commands'): source_code_schema, Optional('commands'): source_code_schema, Optional('post_commands'): source_code_schema, - Optional("help"): help_schema, + Optional("help"): late_bound(help_schema), Optional("uuid"): basestring, Optional("config"): dict, diff --git a/src/rez/packages_.py b/src/rez/packages_.py index 9a37eb477..0f4336506 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -1,7 +1,7 @@ from rez.package_repository import package_repository_manager from rez.package_resources_ import PackageFamilyResource, PackageResource, \ VariantResource, package_family_schema, package_schema, variant_schema, \ - package_release_keys + package_release_keys, late_requires_schema from rez.package_serialise import dump_package_data from rez.utils.logging_ import print_info, print_error from rez.utils.sourcecode import SourceCode @@ -60,10 +60,18 @@ def iter_packages(self): class PackageBaseResourceWrapper(PackageRepositoryResourceWrapper): """Abstract base class for `Package` and `Variant`. 
""" - def __init__(self, *nargs, **kwargs): - super(PackageBaseResourceWrapper, self).__init__(*nargs, **kwargs) + late_bind_schemas = { + "requires": late_requires_schema + } + + def __init__(self, resource, context=None): + super(PackageBaseResourceWrapper, self).__init__(resource) + self.context = context self._late_bindings = {} + def set_context(self, context): + self.context = context + def arbitrary_keys(self): raise NotImplementedError @@ -125,19 +133,29 @@ def _wrap_forwarded(self, key, value): if value_ is _missing: value_ = self._eval_late_binding(value) + + schema = self.late_bind_schemas.get(key) + if schema is not None: + value_ = schema.validate(value_) + self._late_bindings[key] = value_ return value_ else: return value - def _eval_late_binding(self, sourcecode, globals_={}): - g = { - "this": self, - "in_context": lambda: False - } + def _eval_late_binding(self, sourcecode): + g = {} - g.update(globals_) + if self.context is None: + g["in_context"] = lambda: False + else: + g["in_context"] = lambda: True + g["context"] = self.context + + # 'request', 'system' etc + bindings = self.context._get_pre_resolve_bindings() + g.update(bindings) sourcecode.set_package(self) return sourcecode.exec_(globals_=g) @@ -152,14 +170,16 @@ class Package(PackageBaseResourceWrapper): """ keys = schema_keys(package_schema) - def __init__(self, resource): + def __init__(self, resource, context=None): _check_class(resource, PackageResource) - super(Package, self).__init__(resource) + super(Package, self).__init__(resource, context) # arbitrary keys def __getattr__(self, name): if name in self.data: - return self.data[name] + value = self.data[name] + return self._wrap_forwarded(name, value) + #return self.data[name] else: raise AttributeError("Package instance has no attribute '%s'" % name) @@ -206,7 +226,7 @@ def iter_variants(self): """ repo = self.resource._repository for variant in repo.iter_variants(self.resource): - yield Variant(variant) + yield Variant(variant, 
context=self.context) def get_variant(self, index=None): """Get the variant with the associated index. @@ -229,13 +249,9 @@ class Variant(PackageBaseResourceWrapper): keys = schema_keys(variant_schema) keys.update(["index", "root", "subpath"]) - def __init__(self, resource): + def __init__(self, resource, context=None): _check_class(resource, VariantResource) - super(Variant, self).__init__(resource) - self.context = None - - def set_context(self, context): - self.context = context + super(Variant, self).__init__(resource, context) # arbitrary keys def __getattr__(self, name): @@ -271,7 +287,22 @@ def parent(self): """ repo = self.resource._repository package = repo.get_parent_package(self.resource) - return Package(package) + return Package(package, context=self.context) + + @property + def requires(self): + """Get variant requirements. + + This is a concatenation of the package requirements and those if this + specific variant. + """ + package_requires = self.parent.requires or [] + + if self.index is None: + return package_requires + else: + variant_requires = self.parent.variants[self.index] or [] + return package_requires + variant_requires def get_requires(self, build_requires=False, private_build_requires=False): """Get the requirements of the variant. @@ -285,10 +316,12 @@ def get_requires(self, build_requires=False, private_build_requires=False): List of `Requirement` objects. 
""" requires = self.requires or [] + if build_requires: requires = requires + (self.build_requires or []) if private_build_requires: requires = requires + (self.private_build_requires or []) + return requires def install(self, path, dry_run=False, overrides=None): @@ -322,17 +355,6 @@ def install(self, path, dry_run=False, overrides=None): else: return Variant(resource) - def _eval_late_binding(self, sourcecode): - g = {} - - if self.context is None: - g["in_context"] = lambda: False - else: - g["in_context"] = lambda: True - g["context"] = self.context - - return super(Variant, self)._eval_late_binding(sourcecode, globals_=g) - class PackageSearchPath(object): """A list of package repositories. @@ -518,7 +540,7 @@ def create_package(name, data, package_cls=None): return maker.get_package() -def get_variant(variant_handle): +def get_variant(variant_handle, context=None): """Create a variant given its handle (or serialized dict equivalent) Args: @@ -535,7 +557,7 @@ def get_variant(variant_handle): variant_handle = ResourceHandle.from_dict(variant_handle) variant_resource = package_repository_manager.get_resource_from_handle(variant_handle) - variant = Variant(variant_resource) + variant = Variant(variant_resource, context=context) return variant diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index a64c7df27..dc7847c4d 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -5,6 +5,7 @@ from rez.system import system from rez.config import config from rez.util import shlex_join, dedup +from rez.utils.sourcecode import SourceCodeError from rez.utils.colorize import critical, heading, local, implicit, Printer from rez.utils.formatting import columnise, PackageRequest from rez.utils.filesystem import TempDirs @@ -223,6 +224,10 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, self.solve_time = 0.0 # inclusive of load time self.load_time = 0.0 + # the pre-resolve bindings. 
We store these because @late package.py + # functions need them, and we cache them to avoid cost + self.pre_resolve_bindings = None + # suite information self.parent_suite_path = None self.suite_context_name = None @@ -235,7 +240,8 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, request = self.requested_packages(include_implicit=True) - resolver = Resolver(package_requests=request, + resolver = Resolver(context=self, + package_requests=request, package_paths=self.package_paths, package_filter=self.package_filter, package_orderers=self.package_orderers, @@ -509,6 +515,19 @@ def write_to_buffer(self, buf): buf.write(content) + @classmethod + def get_current(cls): + """Get the context for the current env, if there is one. + + Returns: + `ResolvedContext`: Current context, or None if not in a resolved env. + """ + filepath = os.getenv("REZ_RXT_FILE") + if not filepath or not os.path.exists(filepath): + return None + + return cls.load(filepath) + @classmethod def load(cls, path): """Load a resolved context from file.""" @@ -1302,6 +1321,7 @@ def _print_version(value): # create and init the context r = ResolvedContext.__new__(ResolvedContext) r.load_path = None + r.pre_resolve_bindings = None r.timestamp = d["timestamp"] r.building = d["building"] @@ -1406,6 +1426,17 @@ def _create_executor(self, interpreter, parent_environ): parent_environ=parent_environ, parent_variables=parent_vars) + def _get_pre_resolve_bindings(self): + if self.pre_resolve_bindings is None: + self.pre_resolve_bindings = { + "system": system, + "building": self.building, + "request": RequirementsBinding(self._package_requests), + "implicits": RequirementsBinding(self.implicit_packages) + } + + return self.pre_resolve_bindings + @pool_memcached_connections def _execute(self, executor): br = '#' * 80 @@ -1455,17 +1486,19 @@ def _minor_heading(txt): executor.setenv("REZ_RAW_REQUEST", request_str_) executor.setenv("REZ_RESOLVE_MODE", "latest") - executor.bind('request', 
RequirementsBinding(self._package_requests)) - executor.bind('implicits', RequirementsBinding(self.implicit_packages)) + # binds objects such as 'request', which are accessible before a resolve + bindings = self._get_pre_resolve_bindings() + for k, v in bindings.iteritems(): + executor.bind(k, v) + executor.bind('resolve', VariantsBinding(resolved_pkgs)) - executor.bind('building', self.building) # # -- apply each resolved package to the execution context # _heading("package variables") - error_class = Exception if config.catch_rex_errors else None + error_class = SourceCodeError if config.catch_rex_errors else None # set basic package variables and create per-package bindings bindings = {} @@ -1505,30 +1538,6 @@ def _minor_heading(txt): executor.bind('root', pkg.root) executor.bind('base', pkg.base) - """ - # show a meaningful filename in traceback if an error occurs. - # Can't use an actual filepath because (a) the package may not - # have one and (b) it might be a yaml file (line numbers would - # not match up in this case). 
- filename = "<%s>" % pkg.uri - - exc = None - trace = None - try: - executor.execute_code(commands.source, filename=filename) - except IndentationError as e: - commands_ = commands.corrected_for_indent() - if commands_ is commands: - exc = e - trace = traceback.format_exc() - else: - try: - executor.execute_code(commands_.source, filename=filename) - except error_class as e: - exc = e - trace = traceback.format_exc() - """ - exc = None trace = None commands.set_package(pkg) @@ -1537,14 +1546,14 @@ def _minor_heading(txt): executor.execute_code(commands) except error_class as e: exc = e - trace = traceback.format_exc() if exc: - msg = "Error in %s in package %r:\n" % (attr, pkg.uri) + header = "Error in %s in package %r:\n" % (attr, pkg.uri) if self.verbosity >= 2: - msg += trace + msg = header + str(exc) else: - msg += str(exc) + msg = header + exc.short_msg + raise PackageCommandError(msg) _heading("post system setup") diff --git a/src/rez/resolver.py b/src/rez/resolver.py index 6f363c02d..15f4540ec 100644 --- a/src/rez/resolver.py +++ b/src/rez/resolver.py @@ -31,7 +31,7 @@ class Resolver(object): The Resolver uses a combination of Solver(s) and cache(s) to resolve a package request as quickly as possible. """ - def __init__(self, package_requests, package_paths, package_filter=None, + def __init__(self, context, package_requests, package_paths, package_filter=None, package_orderers=None, timestamp=0, callback=None, building=False, verbosity=False, buf=None, package_load_callback=None, caching=True): """Create a Resolver. @@ -50,6 +50,7 @@ def __init__(self, package_requests, package_paths, package_filter=None, caching: If True, cache(s) may be used to speed the resolve. If False, caches will not be used. 
""" + self.context = context self.package_requests = package_requests self.package_paths = package_paths self.timestamp = timestamp @@ -147,6 +148,9 @@ def graph(self): """ return self.graph_ + def _get_variant(self, variant_handle): + return get_variant(variant_handle, context=self.context) + def _get_cached_solve(self): """Find a memcached resolve. @@ -218,7 +222,7 @@ def _retrieve(timestamped): def _packages_changed(key, data): solver_dict, _, variant_states_dict = data for variant_handle in solver_dict.get("variant_handles", []): - variant = get_variant(variant_handle) + variant = self._get_variant(variant_handle) old_state = variant_states_dict.get(variant.name) new_state = variant_states.get(variant) @@ -360,7 +364,8 @@ def _memcache_key(self, timestamped=False): return str(tuple(t)) def _solve(self): - solver = Solver(package_requests=self.package_requests, + solver = Solver(context=self.context, + package_requests=self.package_requests, package_paths=self.package_paths, package_filter=self.package_filter, package_orderers=self.package_orderers, @@ -386,7 +391,7 @@ def _set_result(self, solver_dict): # convert solver.Variants to packages.Variants self.resolved_packages_ = [] for variant_handle in solver_dict.get("variant_handles", []): - variant = get_variant(variant_handle) + variant = self._get_variant(variant_handle) self.resolved_packages_.append(variant) @classmethod diff --git a/src/rez/rex.py b/src/rez/rex.py index 7e1c594d0..5b70556f9 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -5,12 +5,14 @@ import re import UserDict import inspect +import traceback from string import Formatter from rez.system import system from rez.config import config from rez.exceptions import RexError, RexUndefinedVariableError, RezSystemError from rez.util import shlex_join -from rez.utils.sourcecode import SourceCode +from rez.utils import reraise +from rez.utils.sourcecode import SourceCode, SourceCodeError from rez.utils.data_utils import AttrDictWrapper from 
rez.utils.formatting import expandvars from rez.vendor.enum import Enum @@ -1129,18 +1131,11 @@ def compile_code(cls, code, filename=None, exec_namespace=None): Returns: Compiled code object. """ - if isinstance(code, SourceCode): - evaluated_code = code.evaluated_code - funcname = code.function_name - - if filename is None: + if filename is None: + if isinstance(code, SourceCode): filename = code.sourcename - else: - evaluated_code = code - funcname = "" - - filename = filename or "" - error_class = Exception if config.catch_rex_errors else None + else: + filename = "" # compile try: @@ -1148,20 +1143,13 @@ def compile_code(cls, code, filename=None, exec_namespace=None): pyc = code.compiled else: pyc = compile(code, filename, 'exec') - except error_class as e: - # trim trace down to only what's interesting - msg = str(e) - r = re.compile(" line ([1-9][0-9]*)") - match = r.search(str(e)) - if match: - try: - lineno = int(match.groups()[0]) - loc = evaluated_code.split('\n') - line = loc[lineno - 1] - msg += "\n %s" % line - except: - pass - raise RexError(msg) + except SourceCodeError as e: + reraise(e, RexError) + except Exception as e: + stack = traceback.format_exc() + raise RexError("Failed to compile %s:\n\n%s" % (filename, stack)) + + error_class = Exception if config.catch_rex_errors else None # execute if exec_namespace is not None: @@ -1170,13 +1158,11 @@ def compile_code(cls, code, filename=None, exec_namespace=None): code.exec_(globals_=exec_namespace) else: exec pyc in exec_namespace + except SourceCodeError as e: + reraise(e, RexError) except error_class as e: - # trim trace down to only what's interesting - import traceback - frames = traceback.extract_tb(sys.exc_traceback) - frames = [x for x in frames if x[0] == filename] - cls._patch_frames(frames, evaluated_code, filename, funcname) - cls._raise_rex_error(frames, e, sourcename=filename) + stack = traceback.format_exc() + raise RexError("Failed to exec %s:\n\n%s" % (filename, stack)) return pyc @@ 
-1204,19 +1190,14 @@ def execute_function(self, func, *nargs, **kwargs): argdefs=func.func_defaults, closure=func.func_closure) fn.func_globals.update(self.globals) + error_class = Exception if config.catch_rex_errors else None try: return fn(*nargs, **kwargs) except error_class as e: - # trim trace down to only what's interesting - import traceback - frames = traceback.extract_tb(sys.exc_traceback) - - filepath = inspect.getfile(func) - if os.path.exists(filepath): - frames = [x for x in frames if x[0] == filepath] - self._raise_rex_error(frames, e, sourcename=func.__name__) + stack = traceback.format_exc() + raise RexError("Failed to exec %s:\n\n%s" % (filename, stack)) def get_output(self, style=OutputStyle.file): """Returns the result of all previous calls to execute_code.""" @@ -1225,36 +1206,6 @@ def get_output(self, style=OutputStyle.file): def expand(self, value): return self.formatter.format(str(value)) - @classmethod - def _patch_frames(cls, frames, code, codefile=None, funcname=None): - """Patch traceback's frame objects to add lines of code from `code` - where appropriate. - """ - codefile = codefile or "" - loc = code.split('\n') - for i, frame in enumerate(frames): - filename, lineno, name, line = frame - if filename == codefile and line is None: - try: - line = loc[lineno - 1].strip() - funcname = funcname or "" - frames[i] = (filename, lineno, funcname, line) - except: - pass - - @classmethod - def _raise_rex_error(cls, frames, e, sourcename=None): - import traceback - - stack = ''.join(traceback.format_list(frames)).strip() - sourcename = sourcename or "rex code" - - if isinstance(e, RexError): - raise type(e)("%s\n%s" % (str(e), stack)) - else: - raise RexError("Error in %s: %s - %s\n%s" - % (sourcename, e.__class__.__name__, str(e), stack)) - # Copyright 2013-2016 Allan Johns. 
# diff --git a/src/rez/rex_bindings.py b/src/rez/rex_bindings.py index 58ccf5f44..90a053b77 100644 --- a/src/rez/rex_bindings.py +++ b/src/rez/rex_bindings.py @@ -142,6 +142,12 @@ def __init__(self, requirements): def _attr_error(self, attr): raise AttributeError("request does not exist: '%s'" % attr) + def __getitem__(self, name): + if name in self.__requirements: + return self.__requirements[name] + else: + self._attr_error(name) + def __contains__(self, name): return (name in self.__requirements) diff --git a/src/rez/serialise.py b/src/rez/serialise.py index f1d20bf9d..e35c81403 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -1,6 +1,7 @@ """ Read and write data from file. File caching via a memcached server is supported. """ +from rez.package_resources_ import package_rex_keys from rez.utils.scope import ScopeContext from rez.utils.sourcecode import SourceCode, early, late, include from rez.utils.logging_ import print_debug @@ -177,16 +178,20 @@ def process_python_objects(data, filepath=None): with add_sys_paths(config.package_definition_build_python_paths): value = v() else: - """ - print - print 'XXXXXXXXXXXXXXXXXXXXXX', v - from inspect import getsourcelines - loc = getsourcelines(v)[0] - code = (''.join(loc)) - print code - print - """ - value = SourceCode.from_function(v, filepath=filepath) + # if a rex function, the code has to be eval'd NOT as a function, + # otherwise the globals dict doesn't get updated with any vars + # defined in the code, and that means rex code like this: + # + # rr = 'test' + # env.RR = '{rr}' + # + # ..won't work. It was never intentional that the above work, but + # it does, so now we have to keep it so. 
+ # + as_function = (v.__name__ not in package_rex_keys) + + value = SourceCode(func=v, filepath=filepath, + eval_as_function=as_function) data[k] = value elif isinstance(v, dict): diff --git a/src/rez/solver.py b/src/rez/solver.py index c114d59d7..ff58456ed 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -535,8 +535,13 @@ def __init__(self, package_name, solver): # note: we do not apply package filters here, because doing so might # cause package loads (eg, timestamp rules). We only apply filters # during an intersection, which minimises the amount of filtering. - it = iter_packages(self.package_name, paths=self.solver.package_paths) - self.entries = [[x, False] for x in it] + # + self.entries = [] + + for package in iter_packages(self.package_name, + paths=self.solver.package_paths): + package.set_context(solver.context) + self.entries.append([package, False]) if not self.entries: raise PackageFamilyNotFoundError( @@ -1749,7 +1754,7 @@ class Solver(_Common): """ max_verbosity = 3 - def __init__(self, package_requests, package_paths, package_filter=None, + def __init__(self, context, package_requests, package_paths, package_filter=None, package_orderers=None, callback=None, building=False, optimised=True, verbosity=0, buf=None, package_load_callback=None, prune_unfailed=True): @@ -1779,6 +1784,7 @@ def __init__(self, package_requests, package_paths, package_filter=None, True, any packages unrelated to the conflict are removed from the graph. 
""" + self.context = context self.package_paths = package_paths self.package_filter = package_filter self.package_orderers = package_orderers diff --git a/src/rez/util.py b/src/rez/util.py index 5a9e0a1f1..2101f48d2 100644 --- a/src/rez/util.py +++ b/src/rez/util.py @@ -46,7 +46,7 @@ def create_executable_script(filepath, body, program=None): program = program or "python" if callable(body): from rez.utils.sourcecode import SourceCode - code = SourceCode.from_function(body) + code = SourceCode(func=body) body = code.source if not body.endswith('\n'): diff --git a/src/rez/utils/__init__.py b/src/rez/utils/__init__.py index f49e68d05..b5c75cf4d 100644 --- a/src/rez/utils/__init__.py +++ b/src/rez/utils/__init__.py @@ -1,3 +1,14 @@ +import sys + + +def reraise(exc, new_exc_cls=None, format_str=None): + if new_exc_cls is None: + raise + + if format_str is None: + format_str = "%s" + + raise new_exc_cls, format_str % exc, sys.exc_info()[2] # Copyright 2013-2016 Allan Johns. diff --git a/src/rez/utils/data_utils.py b/src/rez/utils/data_utils.py index a002578dd..3b187b174 100644 --- a/src/rez/utils/data_utils.py +++ b/src/rez/utils/data_utils.py @@ -414,6 +414,7 @@ def _defined(x): "%r, already defined" % attr) else: attr = key + members[attr] = cls._make_getter(key, attr, optional, key_schema) if schema or not _defined("schema"): diff --git a/src/rez/utils/memcached.py b/src/rez/utils/memcached.py index 08ca603db..b9edf951d 100644 --- a/src/rez/utils/memcached.py +++ b/src/rez/utils/memcached.py @@ -9,7 +9,7 @@ # this version should be changed if and when the caching interface changes -cache_interface_version = 1 +cache_interface_version = 2 class Client(object): diff --git a/src/rez/utils/sourcecode.py b/src/rez/utils/sourcecode.py index 7841eb54e..7dda062d2 100644 --- a/src/rez/utils/sourcecode.py +++ b/src/rez/utils/sourcecode.py @@ -32,7 +32,16 @@ def late(): this decorator - otherwise it is understood that you want your attribute to be a function, not the return 
value of that function. """ + from rez.package_resources_ import package_rex_keys + def decorated(fn): + + # this is done here rather than in standard schema validation because + # the latter causes a very obfuscated error message + if fn.__name__ in package_rex_keys: + raise ValueError("Cannot use @late decorator on function '%s'" + % fn.__name__) + setattr(fn, "_late", True) _add_decorator(fn, "late") return fn @@ -61,14 +70,16 @@ def _add_decorator(fn, name, **kwargs): class SourceCodeError(Exception): - pass + def __init__(self, msg, short_msg): + super(SourceCodeError, self).__init__(msg) + self.short_msg = short_msg -class SourceCodeCompileError(Exception): +class SourceCodeCompileError(SourceCodeError): pass -class SourceCodeExecError(Exception): +class SourceCodeExecError(SourceCodeError): pass @@ -78,24 +89,39 @@ class SourceCode(object): This object is aware of the decorators defined in this sourcefile (such as 'include') and deals with them appropriately. """ - def __init__(self, source, func=None, filepath=None): - self.source = source.rstrip() + def __init__(self, source=None, func=None, filepath=None, + eval_as_function=True): + self.source = (source or '').rstrip() self.func = func self.filepath = filepath + self.eval_as_function = eval_as_function self.package = None + self.funcname = None + self.decorators = [] + + if self.func is not None: + self._init_from_func() + def copy(self): - other = SourceCode(source=self.source, func=self.func) + other = SourceCode.__new__(SourceCode) + other.source = self.source + other.func = self.func + other.filepath = self.filepath + other.eval_as_function = self.eval_as_function + other.package =self.package + other.funcname = self.funcname + other.decorators = self.decorators + return other - # TODO this breaks in cases like: comment after decorator and before sig; - # sig broken over multiple lines, etc - @classmethod - def from_function(cls, func, filepath=None): + def _init_from_func(self): + self.funcname = 
self.func.__name__ + self.decorators = getattr(self.func, "_decorators", []) + # get txt of function body. Skips sig and any decorators. Assumes that # only the decorators in this file (such as 'include') are used. - num_decorators = len(getattr(func, "_decorators", [])) - loc = getsourcelines(func)[0][num_decorators + 1:] + loc = getsourcelines(self.func)[0][len(self.decorators) + 1:] code = dedent(''.join(loc)) # align lines that start with a comment (#) @@ -118,13 +144,7 @@ def from_function(cls, func, filepath=None): code = '\n'.join(codelines).rstrip() code = dedent(code) - value = SourceCode.__new__(SourceCode) - value.source = code - value.func = func - value.filepath = filepath - value.package = None - - return value + self.source = code @cached_property def includes(self): @@ -141,16 +161,15 @@ def late_binding(self): @cached_property def evaluated_code(self): - # turn into a function, because code may use return clause - if self.func: - funcname = self.func.__name__ - else: - funcname = "_unnamed" + if self.eval_as_function: + funcname = self.funcname or "_unnamed" - code = indent(self.source) - code = ("def %s():\n" % funcname - + code - + "\n_result = %s()" % funcname) + code = indent(self.source) + code = ("def %s():\n" % funcname + + code + + "\n_result = %s()" % funcname) + else: + code = "if True:\n" + indent(self.source) return code @@ -161,18 +180,11 @@ def sourcename(self): else: filename = "string" - if self.func: - filename += ":%s" % self.func.__name__ + if self.funcname: + filename += ":%s" % self.funcname return "<%s>" % filename - @property - def function_name(self): - if self.func: - return self.func.__name__ - else: - return None - @cached_property def compiled(self): try: @@ -180,7 +192,8 @@ def compiled(self): except Exception as e: stack = traceback.format_exc() raise SourceCodeCompileError( - "Failed to compile %s:\n\n%s" % (self.sourcename, stack)) + "Failed to compile %s:\n%s" % (self.sourcename, stack), + short_msg=str(e)) return 
pyc @@ -190,13 +203,10 @@ def set_package(self, package): def exec_(self, globals_={}): # bind import modules - if self.package is not None: - if self.includes: - globals_ = globals_.copy() - - for name in self.includes: - module = include_module_manager.load_module(name, self.package) - globals_[name] = module + if self.package is not None and self.includes: + for name in self.includes: + module = include_module_manager.load_module(name, self.package) + globals_[name] = module # exec pyc = self.compiled @@ -206,7 +216,8 @@ def exec_(self, globals_={}): except Exception as e: stack = traceback.format_exc() raise SourceCodeExecError( - "Failed to execute %s:\n\n%s" % (self.sourcename, stack)) + "Failed to execute %s:\n%s" % (self.sourcename, stack), + short_msg=str(e)) return globals_.get("_result") @@ -219,29 +230,41 @@ def to_text(self, funcname): txt = "def %s():\n%s" % (funcname, source) - if self.func and hasattr(self.func, "_decorators"): - for entry in self.func._decorators: - nargs_str = ", ".join(map(repr, entry.get("nargs", []))) - name_str = entry.get("name") - sig = "@%s(%s)" % (name_str, nargs_str) + for entry in self.decorators: + nargs_str = ", ".join(map(repr, entry.get("nargs", []))) + name_str = entry.get("name") + sig = "@%s(%s)" % (name_str, nargs_str) - txt = sig + '\n' + txt + txt = sig + '\n' + txt return txt def _get_decorator_info(self, name): - if not self.func: - return None - - if not hasattr(self.func, "_decorators"): - return None - - matches = [x for x in self.func._decorators if x.get("name") == name] + matches = [x for x in self.decorators if x.get("name") == name] if not matches: return None return matches[0] + def __getstate__(self): + return { + "source": self.source, + "filepath": self.filepath, + "funcname": self.funcname, + "eval_as_function": self.eval_as_function, + "decorators": self.decorators + } + + def __setstate__(self, state): + self.source = state["source"] + self.filepath = state["filepath"] + self.funcname = 
state["funcname"] + self.eval_as_function = state["eval_as_function"] + self.decorators = state["decorators"] + + self.func = None + self.package = None + def __eq__(self, other): return (isinstance(other, SourceCode) and other.source == self.source) From a0c7683a452919f210c10e185aa374c594a0a323 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 22 Feb 2017 09:59:12 +1100 Subject: [PATCH 033/124] -added 'num_loaded_packages' stat to contexts, see with rez-context -v --- src/rez/resolved_context.py | 34 +++++++++++++++++++++++++--------- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index dc7847c4d..7fc515d9c 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -111,7 +111,7 @@ class ResolvedContext(object): command within a configured python namespace, without spawning a child shell. """ - serialize_version = (4, 2) + serialize_version = (4, 3) tmpdir_manager = TempDirs(config.context_tmpdir, prefix="rez_context_") class Callback(object): @@ -221,8 +221,11 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, self.graph_string = None self.graph_ = None self.from_cache = None - self.solve_time = 0.0 # inclusive of load time - self.load_time = 0.0 + + # stats + self.solve_time = 0.0 # total solve time, inclusive of load time + self.load_time = 0.0 # total time loading packages (disk or memcache) + self.num_loaded_packages = 0 # num packages loaded (disk or memcache) # the pre-resolve bindings. 
We store these because @late package.py # functions need them, and we cache them to avoid cost @@ -238,6 +241,11 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, time_limit=time_limit, callback=callback) + def _package_load_callback(package): + if package_load_callback: + _package_load_callback(package) + self.num_loaded_packages += 1 + request = self.requested_packages(include_implicit=True) resolver = Resolver(context=self, @@ -249,7 +257,7 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, building=self.building, caching=self.caching, callback=callback_, - package_load_callback=package_load_callback, + package_load_callback=_package_load_callback, verbosity=verbosity, buf=buf) resolver.solve() @@ -743,11 +751,12 @@ def _rt(t): _pr() actual_solve_time = self.solve_time - self.load_time _pr("resolve details:", heading) - _pr("load time: %.02f secs" % self.load_time) - _pr("solve time: %.02f secs" % actual_solve_time) - _pr("from cache: %s" % self.from_cache) + _pr("load time: %.02f secs" % self.load_time) + _pr("solve time: %.02f secs" % actual_solve_time) + _pr("packages queried: %d" % self.num_loaded_packages) + _pr("from cache: %s" % self.from_cache) if self.load_path: - _pr("rxt file: %s" % self.load_path) + _pr("rxt file: %s" % self.load_path) if verbosity >= 2: _pr() @@ -1284,9 +1293,11 @@ def to_dict(self): resolved_packages=resolved_packages, failure_description=self.failure_description, graph=graph_str, + from_cache=self.from_cache, solve_time=self.solve_time, - load_time=self.load_time) + load_time=self.load_time, + num_loaded_packages=self.num_loaded_packages) @classmethod def from_dict(cls, d, identifier_str=None): @@ -1342,6 +1353,7 @@ def _print_version(value): r.status_ = ResolverStatus[d["status"]] r.failure_description = d["failure_description"] + r.solve_time = d["solve_time"] r.load_time = d["load_time"] @@ -1392,6 +1404,10 @@ def _print_version(value): else: r.package_orderers = None + # -- SINCE SERIALIZE 
VERSION 4.3 + + r.num_loaded_packages = d.get("num_loaded_packages", -1) + return r @classmethod From 57126468110737b33771d6c3f97c8487bb3a7774 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 22 Feb 2017 14:13:07 +1100 Subject: [PATCH 034/124] - postprocess -> preprocess, both are correct in different ways but pre is more intuitive - misc minor bugfixes -added package_py_utils module --- src/rez/config.py | 2 +- src/rez/developer_package.py | 54 ++++++------ src/rez/exceptions.py | 2 +- src/rez/package_maker__.py | 2 +- src/rez/package_py_utils.py | 83 +++++++++++++++++++ src/rez/package_resources_.py | 6 +- src/rez/packages_.py | 8 ++ src/rez/rezconfig.py | 18 ++-- src/rez/serialise.py | 33 +++++--- src/support/package_utils/README | 2 +- .../{utils.py => set_authors.py} | 0 11 files changed, 154 insertions(+), 56 deletions(-) create mode 100644 src/rez/package_py_utils.py rename src/support/package_utils/{utils.py => set_authors.py} (100%) diff --git a/src/rez/config.py b/src/rez/config.py index 761ba12d6..84c47b714 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -266,7 +266,7 @@ def _parse_env_var(self, value): "implicit_back": OptionalStr, "alias_fore": OptionalStr, "alias_back": OptionalStr, - "package_postprocess_function": OptionalStr, + "package_preprocess_function": OptionalStr, "build_thread_count": BuildThreadCount_, "resource_caching_maxsize": Int, "max_package_changelog_chars": Int, diff --git a/src/rez/developer_package.py b/src/rez/developer_package.py index fd9e1d581..14baceab6 100644 --- a/src/rez/developer_package.py +++ b/src/rez/developer_package.py @@ -60,8 +60,8 @@ def from_path(cls, path): package = create_package(name, data, package_cls=cls) - # postprocessing - result = package._get_postprocessed(data) + # preprocessing + result = package._get_preprocessed(data) if result: package, data = result @@ -106,10 +106,10 @@ def _validate_includes(self): "@include decorator requests module '%s', but the file " "%s does not exist." 
% (name, filepath)) - def _get_postprocessed(self, data): + def _get_preprocessed(self, data): """ Returns: - (DeveloperPackage, new_data) 2-tuple IFF the postprocess function + (DeveloperPackage, new_data) 2-tuple IFF the preprocess function changed the package; otherwise None. """ from rez.serialise import process_python_objects @@ -117,23 +117,23 @@ def _get_postprocessed(self, data): from copy import deepcopy with add_sys_paths(config.package_definition_build_python_paths): - postprocess = getattr(self, "postprocess", None) + preprocess = getattr(self, "preprocess", None) - if postprocess: - postprocess_func = postprocess.func - print_info("Applying postprocess from package.py") + if preprocess: + preprocess_func = preprocess.func + print_info("Applying preprocess from package.py") else: - # load globally configured postprocess function - dotted = self.config.package_postprocess_function + # load globally configured preprocess function + dotted = self.config.package_preprocess_function if not dotted: return None if '.' not in dotted: print_error( - "Setting 'package_postprocess_function' must be of " + "Setting 'package_preprocess_function' must be of " "form 'module[.module.module...].funcname'. 
Package " - "postprocessing has not been applied.") + "preprocessing has not been applied.") return None name, funcname = dotted.rsplit('.', 1) @@ -141,45 +141,45 @@ def _get_postprocessed(self, data): try: module = __import__(name=name, fromlist=[funcname]) except Exception as e: - print_error("Failed to load postprocessing function '%s': %s" + print_error("Failed to load preprocessing function '%s': %s" % (dotted, str(e))) return None setattr(module, "InvalidPackageError", InvalidPackageError) - postprocess_func = getattr(module, funcname) + preprocess_func = getattr(module, funcname) - if not postprocess_func or not isfunction(isfunction): + if not preprocess_func or not isfunction(isfunction): print_error("Function '%s' not found" % dotted) return None - print_info("Applying postprocess function %s" % dotted) + print_info("Applying preprocess function %s" % dotted) - postprocessed_data = deepcopy(data) + preprocessed_data = deepcopy(data) - # apply postprocessing + # apply preprocessing try: - postprocess_func(this=self, data=postprocessed_data) + preprocess_func(this=self, data=preprocessed_data) except InvalidPackageError: raise except Exception as e: - print_error("Failed to apply postprocess: %s: %s" + print_error("Failed to apply preprocess: %s: %s" % (e.__class__.__name__, str(e))) return None - # if postprocess added functions, these need to be converted to + # if preprocess added functions, these need to be converted to # SourceCode instances - postprocessed_data = process_python_objects(postprocessed_data) + preprocessed_data = process_python_objects(preprocessed_data) - if postprocessed_data == data: + if preprocessed_data == data: return None # recreate package from modified package data - package = create_package(self.name, postprocessed_data, + package = create_package(self.name, preprocessed_data, package_cls=self.__class__) # print summary of changed package attributes - added, removed, changed = get_dict_diff(data, postprocessed_data) - lines = 
["Package attributes were changed in post processing:"] + added, removed, changed = get_dict_diff(data, preprocessed_data) + lines = ["Package attributes were changed in preprocessing:"] if added: lines.append("Added attributes: %s" @@ -194,4 +194,4 @@ def _get_postprocessed(self, data): txt = '\n'.join(lines) print_info(txt) - return package, postprocessed_data + return package, preprocessed_data diff --git a/src/rez/exceptions.py b/src/rez/exceptions.py index 3dee4e987..870f9a67c 100644 --- a/src/rez/exceptions.py +++ b/src/rez/exceptions.py @@ -160,7 +160,7 @@ class PackageRepositoryError(RezError): class InvalidPackageError(RezError): - """A special case exception used in package 'postprocess function'.""" + """A special case exception used in package 'preprocess function'.""" pass diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 32280f13d..414be4a9b 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -41,7 +41,7 @@ Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, - Optional("postprocess"): _function_schema, + Optional("preprocess"): _function_schema, # arbitrary fields Optional(basestring): object diff --git a/src/rez/package_py_utils.py b/src/rez/package_py_utils.py new file mode 100644 index 000000000..b10693b73 --- /dev/null +++ b/src/rez/package_py_utils.py @@ -0,0 +1,83 @@ +""" +This sourcefile is intended to only be imported in package.py files, in +functions including: + +- the special 'preprocess' function; +- early bound functions that use the @early decorator. 
+ +An example of use: + + # in package.py + name = 'mypackage' + + version = '1.2.3' + + @early() + def requires(): + from rez.package_py_utils import expand_requires + + return expand_requires( + 'boost-1.*.*', + 'maya-2017.*' + ) +""" + +# Here to allow 'from rez.package_utils import late' in package.py +from rez.utils.sourcecode import late + +# Here to allow 'from rez.package_utils import InvalidPackageError' in package.py +from rez.exceptions import InvalidPackageError + + +def expand_requires(*requests): + """Create an expanded requirements list. + + Given a list of requirements with optional trailing wildcards, expand each + out to the latest package found within that range. This is useful when a + package is compatible with a version range of a package at build time, but + requires a stricter requirement at runtime. For example, a compiled library + may build with many versions of boost (boost-1.*.*), but once compiled, must + be used with the boost version that has then been linked against (1.55.0). + + Note: + If a package is not found in the given range, it is expanded to the + request as-is, with trailing wildcards removed. + + Example: + + >>> print expand_requires(["boost-1.*.*"]) + ["boost-1.55.0"] + >>> print expand_requires(["boost-1.*"]) + ["boost-1.55"] + + Args: + requests (list of str): Requirements to expand. Each value may have + trailing wildcards. + + Returns: + List of str: Expanded requirements. 
+ """ + from rez.vendor.version.requirement import VersionedObject + from rez.packages_ import get_latest_package + + result = [] + + for request in requests: + txt = request.replace('*', '_') + obj = VersionedObject(txt) + rank = len(obj.version) + + request_ = request + while request_.endswith('*'): + request_ = request_[:-2] # consume sep + * + + package = get_latest_package(request_) + + if package is None: + result.append(request_) + continue + + obj.version_ = package.version.trim(rank) + result.append(str(obj)) + + return result diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 44ea1a194..4f9d30a24 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -26,7 +26,7 @@ # package attributes that we don't install package_build_only_keys = ( - "postprocess", + "preprocess", ) # package attributes that are rex-based functions @@ -118,7 +118,7 @@ def late_bound(schema): # deliberately not possible to late bind Optional("variants"): [[PackageRequest]], - Optional("postprocess"): SourceCode + Optional("preprocess"): SourceCode }) @@ -183,7 +183,7 @@ def late_bound(schema): Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, - Optional("postprocess"): _function_schema, + Optional("preprocess"): _function_schema, Optional("timestamp"): int, Optional('revision'): object, diff --git a/src/rez/packages_.py b/src/rez/packages_.py index 0f4336506..b98213370 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -157,6 +157,14 @@ def _eval_late_binding(self, sourcecode): bindings = self.context._get_pre_resolve_bindings() g.update(bindings) + # note that what 'this' actually points to depends on whether the context + # is available or not. If not, then 'this' is a Package instance; if the + # context is available, it is a Variant instance. So for example, if + # in_context() is True, 'this' will have a 'root' attribute, but will + # not if in_context() is False. 
+ # + g["this"] = self + sourcecode.set_package(self) return sourcecode.exec_(globals_=g) diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index a0b07ace2..ef6dc3dd4 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -69,7 +69,7 @@ # These are extra python paths that are added to sys.path **only during a build**. # This means that any of the functions in the following list can import modules # from these paths: -# * The *postprocess* function; +# * The *preprocess* function; # * Any function decorated with @early - these get evaluated at build time. # # You can use this to provide common code to your package definition files during @@ -357,7 +357,7 @@ # scripts (such as .bashrc). If False, package commands are sourced after. package_commands_sourced_first = True -# If you define this function, it will be called as the *postprocess function* +# If you define this function, it will be called as the *preprocess function* # on every package that does not provide its own, as part of the build process. # The given function must be made available by setting the value of # *package_definition_build_python_paths* appropriately. @@ -365,18 +365,18 @@ # For example, consider the settings: # # package_definition_build_python_paths = ["/src/rezutils"] -# package_postprocess_function = "build.validate" +# package_preprocess_function = "build.validate" # # This would use the 'validate' function in the sourcefile /src/rezutils/build.py -# to postprocess every package definition file that does not define its own -# postprocess function. +# to preprocess every package definition file that does not define its own +# preprocess function. # -# If the postprocess function raises an exception, an error message is printed, -# and the postprocessing is not applied to the package. However, if the +# If the preprocess function raises an exception, an error message is printed, +# and the preprocessing is not applied to the package. 
However, if the # *InvalidPackageError* exception is raised, the build is aborted. # # You would typically use this to perform common validation or modification of -# packages. For example, your common postprocess function might check that the +# packages. For example, your common preprocess function might check that the # package name matches a regex. Here's what that might look like: # # # in /src/rezutils/build.py @@ -388,7 +388,7 @@ # if not regex.match(package.name): # raise InvalidPackageError("Invalid package name.") # -package_postprocess_function = None +package_preprocess_function = None ############################################################################### diff --git a/src/rez/serialise.py b/src/rez/serialise.py index e35c81403..6e9d78b2d 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -170,13 +170,22 @@ def load_py(stream, filepath=None): return result -def process_python_objects(data, filepath=None): - for k, v in data.iteritems(): - if isfunction(v): - if hasattr(v, "_early"): +def process_python_objects(value, filepath=None): + + def _process(value): + if isinstance(value, dict): + for k, v in value.items(): + value[k] = _process(v) + + return value + elif isfunction(value): + if hasattr(value, "_early"): # run the function now, and replace with return value with add_sys_paths(config.package_definition_build_python_paths): - value = v() + value_ = value() + + # process again in case this is a function returning a function + return _process(value_) else: # if a rex function, the code has to be eval'd NOT as a function, # otherwise the globals dict doesn't get updated with any vars @@ -188,16 +197,14 @@ def process_python_objects(data, filepath=None): # ..won't work. It was never intentional that the above work, but # it does, so now we have to keep it so. 
# - as_function = (v.__name__ not in package_rex_keys) - - value = SourceCode(func=v, filepath=filepath, - eval_as_function=as_function) + as_function = (value.__name__ not in package_rex_keys) - data[k] = value - elif isinstance(v, dict): - process_python_objects(v, filepath=filepath) + return SourceCode(func=value, filepath=filepath, + eval_as_function=as_function) + else: + return value - return data + return _process(value) def load_yaml(stream, **kwargs): diff --git a/src/support/package_utils/README b/src/support/package_utils/README index 5728c856a..cf11a36bf 100644 --- a/src/support/package_utils/README +++ b/src/support/package_utils/README @@ -1,4 +1,4 @@ This directory contains code that you might want to use for package.py -postprocessing, or import/include in build/runtime functions in you package +preprocessing, or import/include in build/runtime functions in you package definitions. diff --git a/src/support/package_utils/utils.py b/src/support/package_utils/set_authors.py similarity index 100% rename from src/support/package_utils/utils.py rename to src/support/package_utils/set_authors.py From 55315577ba46994fae96c90546a321e4df6bdb06 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 25 Feb 2017 11:25:07 +1100 Subject: [PATCH 035/124] -added native support for expanding 'pkg-*' requirements lists -added rez-build --view-pre option -fix for case where @included module used both in developer pkg and installed pkg --- src/rez/cli/build.py | 8 ++++ src/rez/package_maker__.py | 6 ++- src/rez/package_py_utils.py | 89 ++++++++++++++++--------------------- src/rez/packages_.py | 37 ++++++++++----- src/rez/utils/sourcecode.py | 38 +++++++++++++--- 5 files changed, 107 insertions(+), 71 deletions(-) diff --git a/src/rez/cli/build.py b/src/rez/cli/build.py index 7960c6bf7..4c1988cc5 100644 --- a/src/rez/cli/build.py +++ b/src/rez/cli/build.py @@ -59,6 +59,9 @@ def setup_parser(parser, completions=False): help="create build scripts rather than performing the full 
build. " "Running these scripts will place you into a build environment, where " "you can invoke the build system directly.") + parser.add_argument( + "--view-pre", action="store_true", + help="just view the preprocessed package definition, and exit.") setup_parser_common(parser) @@ -85,12 +88,17 @@ def command(opts, parser, extra_arg_groups=None): from rez.packages_ import get_developer_package from rez.build_process_ import create_build_process from rez.build_system import create_build_system + from rez.serialise import FileFormat import sys # load package working_dir = os.getcwd() package = get_developer_package(working_dir) + if opts.view_pre: + package.print_info(format_=FileFormat.py, skip_attributes=["preprocess"]) + sys.exit(0) + # create build system build_args, child_build_args = get_build_args(opts, parser, extra_arg_groups) buildsys_type = opts.buildsys if ("buildsys" in opts) else None diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 414be4a9b..ccd0c5798 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -7,13 +7,17 @@ _function_schema, late_bound from rez.package_repository import create_memory_package_repository from rez.packages_ import Package +from rez.package_py_utils import expand_requirement from rez.vendor.schema.schema import Schema, Optional, Or, Use, And from rez.vendor.version.version import Version from contextlib import contextmanager import os -package_request_schema = Or(basestring, +# this schema will automatically harden request strings like 'python-*'; see +# the 'expand_requires' function for more info. 
+# +package_request_schema = Or(And(basestring, Use(expand_requirement)), And(PackageRequest, Use(str))) diff --git a/src/rez/package_py_utils.py b/src/rez/package_py_utils.py index b10693b73..bd1ad9ba1 100644 --- a/src/rez/package_py_utils.py +++ b/src/rez/package_py_utils.py @@ -4,44 +4,54 @@ - the special 'preprocess' function; - early bound functions that use the @early decorator. +""" -An example of use: +# these imports just forward the symbols into this module's namespace +from rez.utils.sourcecode import late +from rez.exceptions import InvalidPackageError - # in package.py - name = 'mypackage' - version = '1.2.3' +def expand_requirement(request): + """Expands a requirement string like 'python-2.*' - @early() - def requires(): - from rez.package_py_utils import expand_requires + Only trailing wildcards are supported; they will be replaced with the + latest package version found within the range. If none are found, the + wildcards will just be stripped. - return expand_requires( - 'boost-1.*.*', - 'maya-2017.*' - ) -""" + Example: -# Here to allow 'from rez.package_utils import late' in package.py -from rez.utils.sourcecode import late + >>> print expand_requirement('python-2.*') + python-2.7 -# Here to allow 'from rez.package_utils import InvalidPackageError' in package.py -from rez.exceptions import InvalidPackageError + Args: + request (str): Request to expand, eg 'python-2.*' + Returns: + str: Expanded request string. + """ + from rez.vendor.version.requirement import VersionedObject, Requirement + from rez.packages_ import get_latest_package -def expand_requires(*requests): - """Create an expanded requirements list. + txt = request.replace('*', '_') + obj = VersionedObject(txt) + rank = len(obj.version) + + request_ = request + while request_.endswith('*'): + request_ = request_[:-2] # strip sep + * - Given a list of requirements with optional trailing wildcards, expand each - out to the latest package found within that range. 
This is useful when a - package is compatible with a version range of a package at build time, but - requires a stricter requirement at runtime. For example, a compiled library - may build with many versions of boost (boost-1.*.*), but once compiled, must - be used with the boost version that has then been linked against (1.55.0). + req = Requirement(request_) + package = get_latest_package(name=req.name, range_=req.range_) - Note: - If a package is not found in the given range, it is expanded to the - request as-is, with trailing wildcards removed. + if package is None: + return request_ + + obj.version_ = package.version.trim(rank) + return str(obj) + + +def expand_requires(*requests): + """Create an expanded requirements list. Example: @@ -57,27 +67,4 @@ def expand_requires(*requests): Returns: List of str: Expanded requirements. """ - from rez.vendor.version.requirement import VersionedObject - from rez.packages_ import get_latest_package - - result = [] - - for request in requests: - txt = request.replace('*', '_') - obj = VersionedObject(txt) - rank = len(obj.version) - - request_ = request - while request_.endswith('*'): - request_ = request_[:-2] # consume sep + * - - package = get_latest_package(request_) - - if package is None: - result.append(request_) - continue - - obj.version_ = package.version.trim(rank) - result.append(str(obj)) - - return result + return [expand_requirement(x) for x in requests] diff --git a/src/rez/packages_.py b/src/rez/packages_.py index b98213370..6dd176e8e 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -3,6 +3,7 @@ VariantResource, package_family_schema, package_schema, variant_schema, \ package_release_keys, late_requires_schema from rez.package_serialise import dump_package_data +from rez.utils import reraise from rez.utils.logging_ import print_info, print_error from rez.utils.sourcecode import SourceCode from rez.utils.data_utils import cached_property, _missing @@ -234,7 +235,7 @@ def iter_variants(self): 
""" repo = self.resource._repository for variant in repo.iter_variants(self.resource): - yield Variant(variant, context=self.context) + yield Variant(variant, context=self.context, parent=self) def get_variant(self, index=None): """Get the variant with the associated index. @@ -257,9 +258,10 @@ class Variant(PackageBaseResourceWrapper): keys = schema_keys(variant_schema) keys.update(["index", "root", "subpath"]) - def __init__(self, resource, context=None): + def __init__(self, resource, context=None, parent=None): _check_class(resource, VariantResource) super(Variant, self).__init__(resource, context) + self._parent = parent # arbitrary keys def __getattr__(self, name): @@ -293,24 +295,35 @@ def parent(self): Returns: `Package`. """ - repo = self.resource._repository - package = repo.get_parent_package(self.resource) - return Package(package, context=self.context) + if self._parent is not None: + return self._parent + + try: + repo = self.resource._repository + package = repo.get_parent_package(self.resource) + self._parent = Package(package, context=self.context) + except AttributeError as e: + reraise(e, ValueError) + + return self._parent @property def requires(self): """Get variant requirements. - This is a concatenation of the package requirements and those if this + This is a concatenation of the package requirements and those of this specific variant. """ - package_requires = self.parent.requires or [] + try: + package_requires = self.parent.requires or [] - if self.index is None: - return package_requires - else: - variant_requires = self.parent.variants[self.index] or [] - return package_requires + variant_requires + if self.index is None: + return package_requires + else: + variant_requires = self.parent.variants[self.index] or [] + return package_requires + variant_requires + except AttributeError as e: + reraise(e, ValueError) def get_requires(self, build_requires=False, private_build_requires=False): """Get the requirements of the variant. 
diff --git a/src/rez/utils/sourcecode.py b/src/rez/utils/sourcecode.py index 7dda062d2..95c7dbc52 100644 --- a/src/rez/utils/sourcecode.py +++ b/src/rez/utils/sourcecode.py @@ -293,16 +293,40 @@ def __init__(self): def load_module(self, name, package): from rez.config import config # avoiding circular import + from rez.developer_package import DeveloperPackage - path = os.path.join(package.base, self.include_modules_subpath) - pathname = os.path.join(path, "%s-*.py" % name) + # in rare cases, a @late bound function may get called before the + # package is built. An example is 'requires' and the other requires-like + # functions. These need to be evaluated before a build, but it does also + # make sense to sometimes implement these as late-bound functions. We + # detect this case here, and load the modules from the original (pre- + # copied into package payload) location. + # + if isinstance(package, DeveloperPackage): + from hashlib import sha1 - pathnames = glob(pathname) - if not pathnames: - return None + # load sourcefile from original location + path = config.package_definition_python_path + filepath = os.path.join(path, "%s.py" % name) + + if not os.path.exists(filepath): + return None + + with open(filepath) as f: + txt = f.read().strip() + + hash_str = sha1(txt).hexdigest() + else: + # load sourcefile that's been copied into package install payload + path = os.path.join(package.base, self.include_modules_subpath) + pathname = os.path.join(path, "%s-*.py" % name) + + pathnames = glob(pathname) + if not pathnames: + return None - filepath = pathnames[0] - hash_str = filepath.rsplit('-', 1)[-1].split('.', 1)[0] + filepath = pathnames[0] + hash_str = filepath.rsplit('-', 1)[-1].split('.', 1)[0] module = self.modules.get(hash_str) if module is not None: From 2a732f3f792c0c177c63410ae5e4f08fdd70e13e Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 25 Feb 2017 15:28:31 +1100 Subject: [PATCH 036/124] -minor update to give @early funcs access to pkg data; -docstring 
update --- src/rez/rezconfig.py | 6 ++++-- src/rez/serialise.py | 18 ++++++++++++++---- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index ef6dc3dd4..a6b33e909 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -101,7 +101,8 @@ # This package will import the code from */src/rezutils/utils.py* (or more # specifically, its copy of this sourcefile) and will bind it to the name *utils*. # -# For further information, see [here](Package-Definition-Guide#using-shared-code). +# For further information, see +# [here](Package-Definition-Guide#sharing-code-across-package-definition-files). # package_definition_python_path = None @@ -360,7 +361,8 @@ # If you define this function, it will be called as the *preprocess function* # on every package that does not provide its own, as part of the build process. # The given function must be made available by setting the value of -# *package_definition_build_python_paths* appropriately. +# [package_definition_build_python_paths](#package_definition_build_python_paths) +# appropriately. 
# # For example, consider the settings: # diff --git a/src/rez/serialise.py b/src/rez/serialise.py index 6e9d78b2d..fb4eca0da 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -13,7 +13,7 @@ from rez.vendor.enum import Enum from rez.vendor import yaml from contextlib import contextmanager -from inspect import isfunction +from inspect import isfunction, getargspec from StringIO import StringIO import sys import os @@ -170,7 +170,7 @@ def load_py(stream, filepath=None): return result -def process_python_objects(value, filepath=None): +def process_python_objects(data, filepath=None): def _process(value): if isinstance(value, dict): @@ -182,7 +182,17 @@ def _process(value): if hasattr(value, "_early"): # run the function now, and replace with return value with add_sys_paths(config.package_definition_build_python_paths): - value_ = value() + func = value + + spec = getargspec(func) + args = spec.args or [] + if len(args) not in (0, 1): + raise ResourceError("@early decorated function must " + "take zero or one args only") + if args: + value_ = func(data) + else: + value_ = func() # process again in case this is a function returning a function return _process(value_) @@ -204,7 +214,7 @@ def _process(value): else: return value - return _process(value) + return _process(data) def load_yaml(stream, **kwargs): From 936767e36720351a0bbea855935b45d5a0e0b62f Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Feb 2017 13:27:04 +1100 Subject: [PATCH 037/124] -minor change, moved copy_and_replace() to rez.utils.filesystem --- src/rez/utils/filesystem.py | 33 ++++++++++++++++++++++++++ src/rezplugins/build_process/local.py | 34 +-------------------------- 2 files changed, 34 insertions(+), 33 deletions(-) diff --git a/src/rez/utils/filesystem.py b/src/rez/utils/filesystem.py index 082914641..9a6865c1b 100644 --- a/src/rez/utils/filesystem.py +++ b/src/rez/utils/filesystem.py @@ -101,6 +101,39 @@ def is_subdirectory(path_a, path_b): return (not 
relative.startswith(os.pardir + os.sep)) +def copy_or_replace(src, dst): + '''try to copy with mode, and if it fails, try replacing + ''' + try: + shutil.copy(src, dst) + except (OSError, IOError), e: + # It's possible that the file existed, but was owned by someone + # else - in that situation, shutil.copy might then fail when it + # tries to copy perms. + # However, it's possible that we have write perms to the dir - + # in which case, we can just delete and replace + import errno + + if e.errno == errno.EPERM: + import tempfile + # try copying into a temporary location beside the old + # file - if we have perms to do that, we should have perms + # to then delete the old file, and move the new one into + # place + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + + dst_dir, dst_name = os.path.split(dst) + dst_temp = tempfile.mktemp(prefix=dst_name + '.', dir=dst_dir) + shutil.copy(src, dst_temp) + if not os.path.isfile(dst_temp): + raise RuntimeError( + "shutil.copy completed successfully, but path" + " '%s' still did not exist" % dst_temp) + os.remove(dst) + shutil.move(dst_temp, dst) + + def copytree(src, dst, symlinks=False, ignore=None, hardlinks=False): '''copytree that supports hard-linking ''' diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index d89b093e5..aaba148ad 100644 --- a/src/rezplugins/build_process/local.py +++ b/src/rezplugins/build_process/local.py @@ -7,7 +7,7 @@ from rez.release_hook import ReleaseHookEvent from rez.exceptions import BuildError, ReleaseError from rez.utils.colorize import Printer, warning -from rez.utils.filesystem import safe_makedirs +from rez.utils.filesystem import safe_makedirs, copy_or_replace from rez.utils.sourcecode import IncludeModuleManager from hashlib import sha1 import shutil @@ -145,38 +145,6 @@ def _build_variant_base(self, variant, build_type, install_path=None, if not build_result.get("success"): raise BuildError("The %s build system failed" 
% build_system_name) - def copy_or_replace(src, dst): - '''try to copy with mode, and if it fails, try replacing - ''' - try: - shutil.copy(src, dst) - except (OSError, IOError), e: - # It's possible that the file existed, but was owned by someone - # else - in that situation, shutil.copy might then fail when it - # tries to copy perms. - # However, it's possible that we have write perms to the dir - - # in which case, we can just delete and replace - import errno - if e.errno == errno.EPERM: - import tempfile - # try copying into a temporary location beside the old - # file - if we have perms to do that, we should have perms - # to then delete the old file, and move the new one into - # place - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - - dst_dir, dst_name = os.path.split(dst) - dst_temp = tempfile.mktemp(prefix=dst_name + '.', - dir=dst_dir) - shutil.copy(src, dst_temp) - if not os.path.isfile(dst_temp): - raise RuntimeError( - "shutil.copy completed successfully, but path" - " '%s' still did not exist" % dst_temp) - os.remove(dst) - shutil.move(dst_temp, dst) - if install: # install some files for debugging purposes extra_files = build_result.get("extra_files", []) + [rxt_filepath] From 80c8ef6b2f38713945cbe3960b0ad6be50c4f279 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Feb 2017 13:27:31 +1100 Subject: [PATCH 038/124] -version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 4a67f9917..d60dcd84e 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.5.0" +_rez_version = "2.5.1" # Copyright 2013-2016 Allan Johns. 
From 70ed6d1e7d6bf661ac3582ef8e3957c229a649af Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Feb 2017 13:51:18 +1100 Subject: [PATCH 039/124] -made 'context' optional in Solver, fixes some tests --- src/rez/resolver.py | 4 ++-- src/rez/solver.py | 13 ++++++++----- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/rez/resolver.py b/src/rez/resolver.py index 15f4540ec..49b8cbbfe 100644 --- a/src/rez/resolver.py +++ b/src/rez/resolver.py @@ -364,9 +364,9 @@ def _memcache_key(self, timestamped=False): return str(tuple(t)) def _solve(self): - solver = Solver(context=self.context, - package_requests=self.package_requests, + solver = Solver(package_requests=self.package_requests, package_paths=self.package_paths, + context=self.context, package_filter=self.package_filter, package_orderers=self.package_orderers, callback=self.callback, diff --git a/src/rez/solver.py b/src/rez/solver.py index ff58456ed..ca15000c2 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -1754,16 +1754,19 @@ class Solver(_Common): """ max_verbosity = 3 - def __init__(self, context, package_requests, package_paths, package_filter=None, - package_orderers=None, callback=None, building=False, - optimised=True, verbosity=0, buf=None, package_load_callback=None, - prune_unfailed=True): + def __init__(self, package_requests, package_paths, context=None, + package_filter=None, package_orderers=None, callback=None, + building=False, optimised=True, verbosity=0, buf=None, + package_load_callback=None, prune_unfailed=True): """Create a Solver. Args: package_requests: List of Requirement objects representing the request. package_paths: List of paths to search for pkgs. + context (`ResolvedContext`): Context this solver is used within, if + any. This is needed in a solve if any packages contain late + binding package attributes that need access to context info. package_filter (`PackageFilterBase`): Filter for excluding packages. 
package_orderers (list of `PackageOrder`): Custom package ordering. building: True if we're resolving for a build. @@ -1784,7 +1787,6 @@ def __init__(self, context, package_requests, package_paths, package_filter=None True, any packages unrelated to the conflict are removed from the graph. """ - self.context = context self.package_paths = package_paths self.package_filter = package_filter self.package_orderers = package_orderers @@ -1795,6 +1797,7 @@ def __init__(self, context, package_requests, package_paths, package_filter=None self.package_load_callback = package_load_callback self.building = building self.request_list = None + self.context = context self.non_conflict_package_requests = [x for x in package_requests if not x.conflict] From 82585070c760e195e8866991a61dec7189128f23 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Feb 2017 15:54:58 +1100 Subject: [PATCH 040/124] -fixed most tests, one fail left --- src/rez/package_py_utils.py | 3 +++ src/rez/package_resources_.py | 7 +------ src/rez/rex.py | 8 ++++++++ src/rez/tests/test_packages.py | 9 +++++---- src/rez/utils/data_utils.py | 2 +- src/rezplugins/package_repository/filesystem.py | 8 ++++++-- 6 files changed, 24 insertions(+), 13 deletions(-) diff --git a/src/rez/package_py_utils.py b/src/rez/package_py_utils.py index bd1ad9ba1..a1066ee22 100644 --- a/src/rez/package_py_utils.py +++ b/src/rez/package_py_utils.py @@ -29,6 +29,9 @@ def expand_requirement(request): Returns: str: Expanded request string. 
""" + if '*' not in request: + return request + from rez.vendor.version.requirement import VersionedObject, Requirement from rez.packages_ import get_latest_package diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 4f9d30a24..feaada688 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -393,7 +393,7 @@ class _Metas(AttributeForwardMeta, LazyAttributeMeta): pass schema = variant_schema # forward Package attributes onto ourself - keys = schema_keys(package_schema) - set(["requires", "variants"]) + keys = schema_keys(package_schema) - set(["variants"]) def _uri(self): index = self.index @@ -418,11 +418,6 @@ def _root(self): root = os.path.join(self.base, self.subpath) return root - #@cached_property - #def requires(self): - # reqs = self.parent.requires or [] - # return reqs + self.variant_requires - @cached_property def variant_requires(self): index = self.index diff --git a/src/rez/rex.py b/src/rez/rex.py index 5b70556f9..08760cf17 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -1158,6 +1158,8 @@ def compile_code(cls, code, filename=None, exec_namespace=None): code.exec_(globals_=exec_namespace) else: exec pyc in exec_namespace + except RexError: + raise except SourceCodeError as e: reraise(e, RexError) except error_class as e: @@ -1195,8 +1197,14 @@ def execute_function(self, func, *nargs, **kwargs): try: return fn(*nargs, **kwargs) + except RexError: + raise except error_class as e: + from inspect import getfile + stack = traceback.format_exc() + filename = getfile(func) + raise RexError("Failed to exec %s:\n\n%s" % (filename, stack)) def get_output(self, style=OutputStyle.file): diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index 836bd39ee..d17d0daef 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -184,20 +184,21 @@ def test_5(self): def test_6(self): """test variant iteration.""" - expected_data_ = dict( + expected_data = dict( 
name="variants_py", version=Version("2.0"), description="package with variants", base=os.path.join(self.py_packages_path, "variants_py", "2.0"), + requires=[PackageRequest("python-2.7")], commands=SourceCode('env.PATH.append("{root}/bin")')) requires_ = ["platform-linux", "platform-osx"] package = get_package("variants_py", "2.0") for i, variant in enumerate(package.iter_variants()): - expected_data = expected_data_.copy() - expected_data["requires"] = [PackageRequest('python-2.7'), - PackageRequest(requires_[i])] + #expected_data = expected_data_.copy() + #expected_data["requires"] = [PackageRequest('python-2.7'), + # PackageRequest(requires_[i])] data = variant.validated_data() self.assertDictEqual(data, expected_data) self.assertEqual(variant.index, i) diff --git a/src/rez/utils/data_utils.py b/src/rez/utils/data_utils.py index 3b187b174..0965d68f4 100644 --- a/src/rez/utils/data_utils.py +++ b/src/rez/utils/data_utils.py @@ -465,7 +465,7 @@ def func(self, key, attr, schema): @classmethod def _make_getter(cls, key, attribute, optional, key_schema): def getter(self): - if key not in self._data: + if key not in (self._data or {}): if optional: return None raise self.schema_error("Required key is missing: %r" % key) diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py index 58ecb040c..4b574589a 100644 --- a/src/rezplugins/package_repository/filesystem.py +++ b/src/rezplugins/package_repository/filesystem.py @@ -760,11 +760,15 @@ def _create_variant(self, variant, dry_run=False, overrides=None): new_package_data.pop("variants", None) package_changed = False - for key in package_build_only_keys: - new_package_data.pop(key, None) + def remove_build_keys(obj): + for key in package_build_only_keys: + obj.pop(key, None) + + remove_build_keys(new_package_data) if existing_package: existing_package_data = existing_package.validated_data() + remove_build_keys(existing_package_data) # detect case where new variant 
introduces package changes outside of variant data_1 = existing_package_data.copy() From b4b42490811bf0585006a4ac8dbc31faf488899f Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 1 Mar 2017 14:22:32 +1100 Subject: [PATCH 041/124] -fixed bug in merge of PR #354 --- src/rezplugins/build_process/local.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index b8cc3af0f..6af96f976 100644 --- a/src/rezplugins/build_process/local.py +++ b/src/rezplugins/build_process/local.py @@ -122,7 +122,7 @@ def _build_variant_base(self, variant, build_type, install_path=None, pkg_repo = package_repository_manager.get_repository(install_path) pkg_repo.pre_variant_install(variant.resource) - if os.path.exists(variant_install_path): + if not os.path.exists(variant_install_path): safe_makedirs(variant_install_path) # create build environment From 7551f99678f359bf33a7e80461267d2652131433 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 1 Mar 2017 14:26:04 +1100 Subject: [PATCH 042/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 606e06dc9..bb48ebd74 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.5.2" +_rez_version = "2.5.3" # Copyright 2013-2016 Allan Johns. 
From 31760f0ae6b954b94cc59f971ea01261f833c57f Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 1 Mar 2017 14:59:38 +1100 Subject: [PATCH 043/124] -added some tests for new package.py features --- .../packages/developer_dynamic/package.py | 9 +++++++ .../py_packages/late_binding/1.0/package.py | 26 +++++++++++++++++++ src/rez/tests/test_packages.py | 18 ++++++++++--- 3 files changed, 49 insertions(+), 4 deletions(-) create mode 100644 src/rez/tests/data/packages/developer_dynamic/package.py create mode 100644 src/rez/tests/data/packages/py_packages/late_binding/1.0/package.py diff --git a/src/rez/tests/data/packages/developer_dynamic/package.py b/src/rez/tests/data/packages/developer_dynamic/package.py new file mode 100644 index 000000000..25d2638c3 --- /dev/null +++ b/src/rez/tests/data/packages/developer_dynamic/package.py @@ -0,0 +1,9 @@ +name = "developer_dynamic" + +@early() +def description(): + return "This." + +requires = [ + "versioned-*" +] diff --git a/src/rez/tests/data/packages/py_packages/late_binding/1.0/package.py b/src/rez/tests/data/packages/py_packages/late_binding/1.0/package.py new file mode 100644 index 000000000..a74b55511 --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/late_binding/1.0/package.py @@ -0,0 +1,26 @@ +name = 'late_binding' + +version = "1.0" + +@late() +def tools(): + return ["util"] + +def commands(): + env.PATH.append("{root}/bin") + + +# Copyright 2013-2016 Allan Johns. +# +# This library is free software: you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation, either +# version 3 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library. If not, see . diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index d17d0daef..e5da8a4df 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -28,13 +28,14 @@ 'pysplit-5', 'pysplit-6', 'pysplit-7', 'python-2.5.2', 'python-2.6.0', 'python-2.6.8', 'python-2.7.0', 'pyvariants-2', - # packages from data/packages + # packages from data/packages/py_packages and .../yaml_packages 'unversioned', 'unversioned_py', 'versioned-1.0', 'versioned-2.0', 'versioned-3.0', 'variants_py-2.0', 'single_unversioned', 'single_versioned-3.5', + 'late_binding-1.0', 'multi-1.0', 'multi-1.1', 'multi-1.2', 'multi-2.0']) @@ -120,6 +121,10 @@ def test_3(self): "versioned", "2.0", "package.yaml") self.assertEqual(package.uri, expected_uri) + # a py-based package with late binding attribute functions + package = get_package("late_binding", "1.0") + self.assertEqual(package.tools, ["util"]) + # a 'combined' type package package = get_package("multi", "1.0") expected_uri = os.path.join(self.yaml_packages_path, "multi.yaml<1.0>") @@ -182,6 +187,14 @@ def test_5(self): data = package.validated_data() self.assertDictEqual(data, expected_data) + # a developer package with features such as expanding requirements + # and early-binding attribute functions + path = os.path.join(self.packages_base_path, "developer_dynamic") + package = get_developer_package(path) + + self.assertEqual(package.description, "This.") + self.assertEqual(package.requires, [PackageRequest('versioned-3')]) + def test_6(self): """test variant iteration.""" expected_data = dict( @@ -196,9 +209,6 @@ def test_6(self): package = get_package("variants_py", "2.0") for i, variant in enumerate(package.iter_variants()): - #expected_data = expected_data_.copy() - #expected_data["requires"] = [PackageRequest('python-2.7'), - # PackageRequest(requires_[i])] data = 
variant.validated_data() self.assertDictEqual(data, expected_data) self.assertEqual(variant.index, i) From 1aac3ee40c82aaa9e1ae678a65c5ab41220b8b96 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 1 Mar 2017 15:21:24 +1100 Subject: [PATCH 044/124] -added testing for preprocessing, and build-time package.py code sharing --- .../tests/data/packages/developer_dynamic/package.py | 4 ++++ src/rez/tests/data/python/early_bind/__init__.py | 0 src/rez/tests/data/python/early_bind/early_utils.py | 6 ++++++ src/rez/tests/test_packages.py | 10 ++++++++-- 4 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 src/rez/tests/data/python/early_bind/__init__.py create mode 100644 src/rez/tests/data/python/early_bind/early_utils.py diff --git a/src/rez/tests/data/packages/developer_dynamic/package.py b/src/rez/tests/data/packages/developer_dynamic/package.py index 25d2638c3..4fd493e72 100644 --- a/src/rez/tests/data/packages/developer_dynamic/package.py +++ b/src/rez/tests/data/packages/developer_dynamic/package.py @@ -7,3 +7,7 @@ def description(): requires = [ "versioned-*" ] + +def preprocess(this, data): + from early_utils import get_authors + data["authors"] = get_authors() diff --git a/src/rez/tests/data/python/early_bind/__init__.py b/src/rez/tests/data/python/early_bind/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/rez/tests/data/python/early_bind/early_utils.py b/src/rez/tests/data/python/early_bind/early_utils.py new file mode 100644 index 000000000..7414ef04d --- /dev/null +++ b/src/rez/tests/data/python/early_bind/early_utils.py @@ -0,0 +1,6 @@ + +def get_authors(): + return [ + "tweedle-dee", + "tweedle-dum" + ] diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index e5da8a4df..bc1e60f91 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -61,10 +61,15 @@ def setUpClass(cls): cls.yaml_packages_path = os.path.join(cls.packages_base_path, "yaml_packages") 
cls.py_packages_path = os.path.join(cls.packages_base_path, "py_packages") + cls.package_definition_build_python_paths = [ + os.path.join(path, "data", "python", "early_bind") + ] + cls.settings = dict( packages_path=[cls.solver_packages_path, cls.yaml_packages_path, cls.py_packages_path], + package_definition_build_python_paths=cls.package_definition_build_python_paths, package_filter=None) @classmethod @@ -187,13 +192,14 @@ def test_5(self): data = package.validated_data() self.assertDictEqual(data, expected_data) - # a developer package with features such as expanding requirements - # and early-binding attribute functions + # a developer package with features such as expanding requirements, + # early-binding attribute functions, and preprocessing path = os.path.join(self.packages_base_path, "developer_dynamic") package = get_developer_package(path) self.assertEqual(package.description, "This.") self.assertEqual(package.requires, [PackageRequest('versioned-3')]) + self.assertEqual(package.authors, ["tweedle-dee", "tweedle-dum"]) def test_6(self): """test variant iteration.""" From 88abec66ffc0dae585be615f6bc67d68af46dcd6 Mon Sep 17 00:00:00 2001 From: ajohns Date: Thu, 2 Mar 2017 11:38:36 +1100 Subject: [PATCH 045/124] -added REZ_BUILD_REQUIRES envvar -made list of packages printed during rez-build quoted, so they can be cut+pasted to a rez-env call for debugging --- src/rez/build_process_.py | 6 ++++-- src/rez/build_system.py | 4 +++- src/rez/utils/_version.py | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/rez/build_process_.py b/src/rez/build_process_.py index 53bb5b57a..c0c565d50 100644 --- a/src/rez/build_process_.py +++ b/src/rez/build_process_.py @@ -9,6 +9,7 @@ from rez.config import config from rez.vendor.enum import Enum from contextlib import contextmanager +from pipes import quote import getpass import os.path @@ -199,8 +200,9 @@ def create_build_context(self, variant, build_type, build_path): request = 
variant.get_requires(build_requires=True, private_build_requires=True) - requests_str = ' '.join(map(str, request)) - self._print("Resolving build environment: %s", requests_str) + req_strs = map(str, request) + quoted_req_strs = map(quote, req_strs) + self._print("Resolving build environment: %s", ' '.join(quoted_req_strs)) if build_type == BuildType.local: packages_path = self.package.config.packages_path diff --git a/src/rez/build_system.py b/src/rez/build_system.py index 00f23edca..614a004fa 100644 --- a/src/rez/build_system.py +++ b/src/rez/build_system.py @@ -166,7 +166,9 @@ def get_standard_vars(cls, context, variant, build_type, install): 'REZ_BUILD_PROJECT_NAME': package.name, 'REZ_BUILD_PROJECT_DESCRIPTION': \ (package.description or '').strip(), - 'REZ_BUILD_PROJECT_FILE': getattr(variant, 'filepath', ''), + 'REZ_BUILD_PROJECT_FILE': getattr(variant.parent, 'filepath', ''), + 'REZ_BUILD_REQUIRES': \ + ' '.join(str(x) for x in context.requested_packages(True)), 'REZ_BUILD_REQUIRES_UNVERSIONED': \ ' '.join(x.name for x in context.requested_packages(True)), 'REZ_BUILD_TYPE': build_type.name, diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index bb48ebd74..848003c45 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.5.3" +_rez_version = "2.6.0" # Copyright 2013-2016 Allan Johns. 
From a95d1782fbb03b3aff690f84428f7470f1bea351 Mon Sep 17 00:00:00 2001 From: ajohns Date: Thu, 2 Mar 2017 13:47:25 +1100 Subject: [PATCH 046/124] fixed broken tests --- src/rez/build_system.py | 2 +- src/rez/resolved_context.py | 23 ++++++++--------------- src/rez/tests/test_commands.py | 15 +++++++++++++++ src/rezplugins/build_system/cmake.py | 2 +- 4 files changed, 25 insertions(+), 17 deletions(-) diff --git a/src/rez/build_system.py b/src/rez/build_system.py index 614a004fa..8d1257baa 100644 --- a/src/rez/build_system.py +++ b/src/rez/build_system.py @@ -166,7 +166,7 @@ def get_standard_vars(cls, context, variant, build_type, install): 'REZ_BUILD_PROJECT_NAME': package.name, 'REZ_BUILD_PROJECT_DESCRIPTION': \ (package.description or '').strip(), - 'REZ_BUILD_PROJECT_FILE': getattr(variant.parent, 'filepath', ''), + 'REZ_BUILD_PROJECT_FILE': getattr(package, 'filepath', ''), 'REZ_BUILD_REQUIRES': \ ' '.join(str(x) for x in context.requested_packages(True)), 'REZ_BUILD_REQUIRES_UNVERSIONED': \ diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index e3341884d..ab9d79d11 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1521,27 +1521,20 @@ def _minor_heading(txt): for pkg in resolved_pkgs: _minor_heading("variables for package %s" % pkg.qualified_name) prefix = "REZ_" + pkg.name.upper().replace('.', '_') + executor.setenv(prefix + "_VERSION", str(pkg.version)) - major_version = pkg.version[0] if len(pkg.version) >= 1 else '' - minor_version = pkg.version[1] if len(pkg.version) >= 2 else '' - patch_version = pkg.version[2] if len(pkg.version) >= 3 else '' - executor.setenv(prefix + "_MAJOR_VERSION", str(major_version)) - executor.setenv(prefix + "_MINOR_VERSION", str(minor_version)) - executor.setenv(prefix + "_PATCH_VERSION", str(patch_version)) + major_version = str(pkg.version[0] if len(pkg.version) >= 1 else '') + minor_version = str(pkg.version[1] if len(pkg.version) >= 2 else '') + patch_version = 
str(pkg.version[2] if len(pkg.version) >= 3 else '') + executor.setenv(prefix + "_MAJOR_VERSION", major_version) + executor.setenv(prefix + "_MINOR_VERSION", minor_version) + executor.setenv(prefix + "_PATCH_VERSION", patch_version) + executor.setenv(prefix + "_BASE", pkg.base) executor.setenv(prefix + "_ROOT", pkg.root) bindings[pkg.name] = dict(version=VersionBinding(pkg.version), variant=VariantBinding(pkg)) - # just provide major/minor/patch during builds - if self.building: - if len(pkg.version) >= 1: - executor.setenv(prefix + "_MAJOR_VERSION", str(pkg.version[0])) - if len(pkg.version) >= 2: - executor.setenv(prefix + "_MINOR_VERSION", str(pkg.version[1])) - if len(pkg.version) >= 3: - executor.setenv(prefix + "_PATCH_VERSION", str(pkg.version[2])) - # commands for attr in ("pre_commands", "commands", "post_commands"): found = False diff --git a/src/rez/tests/test_commands.py b/src/rez/tests/test_commands.py index ba321fd3c..d07a64241 100644 --- a/src/rez/tests/test_commands.py +++ b/src/rez/tests/test_commands.py @@ -66,9 +66,18 @@ def _test_package(self, pkg, env, expected_commands): def _get_rextest_commands(self, pkg): verstr = str(pkg.version) base = os.path.join(self.packages_path, "rextest", verstr) + + major_version = str(pkg.version[0] if len(pkg.version) >= 1 else '') + minor_version = str(pkg.version[1] if len(pkg.version) >= 2 else '') + patch_version = str(pkg.version[2] if len(pkg.version) >= 3 else '') + cmds = [Setenv('REZ_REXTEST_VERSION', verstr), + Setenv('REZ_REXTEST_MAJOR_VERSION', major_version), + Setenv('REZ_REXTEST_MINOR_VERSION', minor_version), + Setenv('REZ_REXTEST_PATCH_VERSION', patch_version), Setenv('REZ_REXTEST_BASE', base), Setenv('REZ_REXTEST_ROOT', base), + # from package... 
Setenv('REXTEST_ROOT', base), Setenv('REXTEST_VERSION', verstr), Setenv('REXTEST_MAJOR_VERSION', str(pkg.version[0])), @@ -110,10 +119,16 @@ def test_2(self): Setenv('REZ_USED_RESOLVE', "rextest-1.3 rextest2-2"), # rez's rextest vars Setenv('REZ_REXTEST_VERSION', "1.3"), + Setenv('REZ_REXTEST_MAJOR_VERSION', '1'), + Setenv('REZ_REXTEST_MINOR_VERSION', '3'), + Setenv('REZ_REXTEST_PATCH_VERSION', ''), Setenv('REZ_REXTEST_BASE', base), Setenv('REZ_REXTEST_ROOT', base), # rez's rextest2 vars Setenv('REZ_REXTEST2_VERSION', '2'), + Setenv('REZ_REXTEST2_MAJOR_VERSION', '2'), + Setenv('REZ_REXTEST2_MINOR_VERSION', ''), + Setenv('REZ_REXTEST2_PATCH_VERSION', ''), Setenv('REZ_REXTEST2_BASE', base2), Setenv('REZ_REXTEST2_ROOT', base2), # rextest's commands diff --git a/src/rezplugins/build_system/cmake.py b/src/rezplugins/build_system/cmake.py index d569e3ebf..81e812922 100644 --- a/src/rezplugins/build_system/cmake.py +++ b/src/rezplugins/build_system/cmake.py @@ -211,7 +211,7 @@ def _add_build_actions(cls, executor, context, package, variant, cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files") template_path = os.path.join(os.path.dirname(__file__), "template_files") - cls.set_standard_vars(executor, context, variant, build_type, install) + #cls.set_standard_vars(executor, context, variant, build_type, install) executor.env.CMAKE_MODULE_PATH.append(cmake_path.replace('\\', '/')) executor.env.REZ_BUILD_DOXYFILE = os.path.join(template_path, 'Doxyfile') From 604ae7cfede2cdaa292ecc8f7136b49902bd068e Mon Sep 17 00:00:00 2001 From: ajohns Date: Thu, 2 Mar 2017 13:53:52 +1100 Subject: [PATCH 047/124] fixed bug from forgotten commenting --- src/rezplugins/build_process/local.py | 2 +- src/rezplugins/build_system/cmake.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/rezplugins/build_process/local.py b/src/rezplugins/build_process/local.py index 6af96f976..06f3c3c20 100644 --- a/src/rezplugins/build_process/local.py +++ 
b/src/rezplugins/build_process/local.py @@ -144,7 +144,7 @@ def _build_variant_base(self, variant, build_type, install_path=None, build_type=build_type) if not build_result.get("success"): - raise BuildError("The %s build system failed" % build_system_name) + raise BuildError("The %s build system failed." % build_system_name) if install: # install some files for debugging purposes diff --git a/src/rezplugins/build_system/cmake.py b/src/rezplugins/build_system/cmake.py index 81e812922..d569e3ebf 100644 --- a/src/rezplugins/build_system/cmake.py +++ b/src/rezplugins/build_system/cmake.py @@ -211,7 +211,7 @@ def _add_build_actions(cls, executor, context, package, variant, cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files") template_path = os.path.join(os.path.dirname(__file__), "template_files") - #cls.set_standard_vars(executor, context, variant, build_type, install) + cls.set_standard_vars(executor, context, variant, build_type, install) executor.env.CMAKE_MODULE_PATH.append(cmake_path.replace('\\', '/')) executor.env.REZ_BUILD_DOXYFILE = os.path.join(template_path, 'Doxyfile') From 1a809008398ea14969cf057e9030bd81c451e84d Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Mar 2017 08:23:41 +1100 Subject: [PATCH 048/124] rez-test WIP --- src/rez/cli/_util.py | 1 + src/rez/cli/test.py | 49 +++++++ src/rez/exceptions.py | 5 + src/rez/package_maker__.py | 12 ++ src/rez/package_resources_.py | 29 ++++- src/rez/package_test.py | 238 ++++++++++++++++++++++++++++++++++ src/rez/packages_.py | 23 ++++ src/rez/resolved_context.py | 2 +- src/rez/utils/formatting.py | 2 +- 9 files changed, 354 insertions(+), 7 deletions(-) create mode 100644 src/rez/cli/test.py create mode 100644 src/rez/package_test.py diff --git a/src/rez/cli/_util.py b/src/rez/cli/_util.py index ef7116231..805e58fff 100644 --- a/src/rez/cli/_util.py +++ b/src/rez/cli/_util.py @@ -21,6 +21,7 @@ "pip", "release", "search", + "test", "view", "status", "suite", diff --git a/src/rez/cli/test.py 
b/src/rez/cli/test.py new file mode 100644 index 000000000..a1ad96b8c --- /dev/null +++ b/src/rez/cli/test.py @@ -0,0 +1,49 @@ +''' +Run tests listed in a package's definition file. +''' + + +def setup_parser(parser, completions=False): + parser.add_argument( + "-l", "--list", action="store_true", + help="list package's tests and exit") + PKG_action = parser.add_argument( + "PKG", + help="package run tests on") + parser.add_argument( + "TEST", nargs='*', + help="tests to run (run all if not provided)") + + if completions: + from rez.cli._complete_util import PackageCompleter + PKG_action.completer = PackageCompleter + + +def command(opts, parser, extra_arg_groups=None): + from rez.package_test import PackageTestRunner + import sys + + runner = PackageTestRunner(package_request=opts.PKG, + verbose=True) + + test_names = runner.get_test_names() + if not test_names: + uri = runner.get_package().uri + print >> sys.stderr, "No tests found in %s" % uri + sys.exit(0) + + if opts.list: + print '\n'.join(test_names) + sys.exit(0) + + if opts.TEST: + run_test_names = opts.TEST + else: + run_test_names = test_names + + for test_name in run_test_names: + proc = runner.run_test(test_name) + proc.wait() + + if proc.returncode: + sys.exit(proc.returncode) diff --git a/src/rez/exceptions.py b/src/rez/exceptions.py index 870f9a67c..59992f4c7 100644 --- a/src/rez/exceptions.py +++ b/src/rez/exceptions.py @@ -88,6 +88,11 @@ class PackageRequestError(RezError): pass +class PackageTestError(RezError): + """There was a problem running a package test.""" + pass + + class ResolvedContextError(RezError): """An error occurred in a resolved context.""" pass diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index ccd0c5798..533d7cd94 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -20,6 +20,16 @@ package_request_schema = Or(And(basestring, Use(expand_requirement)), And(PackageRequest, Use(str))) +tests_schema = Schema({ + Optional(basestring): 
Or( + Or(basestring, [basestring]), + { + "command": Or(basestring, [basestring]), + Optional("requires"): [package_request_schema] + } + ) +}) + package_schema = Schema({ Required("name"): basestring, @@ -41,6 +51,8 @@ Optional('tools'): late_bound([basestring]), Optional('help'): late_bound(help_schema), + Optional('tests'): tests_schema, + Optional('pre_commands'): _commands_schema, Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index feaada688..787ad8be9 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -60,16 +60,34 @@ def late_bound(schema): #------------------------------------------------------------------------------ # requirements of all package-related resources +# + base_resource_schema_dict = { Required("name"): basestring } # package family +# + package_family_schema_dict = base_resource_schema_dict.copy() # schema common to both package and variant +# + +tests_schema = Schema({ + Optional(basestring): Or( + Or(basestring, [basestring]), + { + "command": Or(basestring, [basestring]), + Optional("requires"): [ + Or(PackageRequest, And(basestring, Use(PackageRequest))) + ] + } + ) +}) + package_base_schema_dict = base_resource_schema_dict.copy() package_base_schema_dict.update({ # basics @@ -93,6 +111,9 @@ def late_bound(schema): Optional('tools'): late_bound([basestring]), Optional('help'): late_bound(help_schema), + # testing + Optional('tests'): tests_schema, + # commands Optional('pre_commands'): SourceCode, Optional('commands'): SourceCode, @@ -116,9 +137,7 @@ def late_bound(schema): package_schema_dict = package_base_schema_dict.copy() package_schema_dict.update({ # deliberately not possible to late bind - Optional("variants"): [[PackageRequest]], - - Optional("preprocess"): SourceCode + Optional("variants"): [[PackageRequest]] }) @@ -179,12 +198,12 @@ def late_bound(schema): Optional('tools'): 
late_bound([basestring]), Optional('help'): late_bound(help_schema), + Optional('tests'): tests_schema, + Optional('pre_commands'): _commands_schema, Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, - Optional("preprocess"): _function_schema, - Optional("timestamp"): int, Optional('revision'): object, Optional('changelog'): large_string_dict, diff --git a/src/rez/package_test.py b/src/rez/package_test.py new file mode 100644 index 000000000..337d04f33 --- /dev/null +++ b/src/rez/package_test.py @@ -0,0 +1,238 @@ +from rez.config import config +from rez.resolved_context import ResolvedContext +from rez.packages_ import get_latest_package_from_string, Variant +from rez.exceptions import PackageNotFoundError, PackageTestError +from rez.utils.colorize import heading, Printer +from pipes import quote +import subprocess +import sys + + +class PackageTestRunner(object): + """Object for running a package's tests. + + This runs the tests listed in the package's "tests" attribute. + + An example tests entry in a package.py might look like this: + + tests = { + "unit": "python -m unittest -s {root}/tests", + "CI": { + "command": "python {root}/ci_tests/main.py", + "requires": ["maya-2017"], + "replace": True + } + } + + By default tests are run in an environment containing the current package. + + If a test entry is just a string, then it is treated as the test + command. If a dict, the "command" string is the command, and the "requires" + list is added to the test env. + + Command strings automatically expand references such as '{root}', much + as happens in a *commands* function. + + Commands can also be a list - in this case, the test process is launched + directly, rather than interpreted via a shell. + """ + def __init__(self, package_request, use_current_env=False, + extra_package_requests=None, package_paths=None, stdout=None, + stderr=None, verbose=False, **context_kwargs): + """Create a package tester. 
+ + Args: + package_request (str or `PackageRequest`): The package to test. + use_current_env (bool): If True, run the test directly in the current + rez-resolved environment, if there is one, and if it contains + packages that meet the test's requirements. + extra_package_requests (list of str or `PackageRequest`): Extra + requests, these are appended to the test environment. + package_paths: List of paths to search for pkgs, defaults to + config.packages_path. + stdout (file-like object): Defaults to sys.stdout. + stderr (file-like object): Defaults to sys.stderr. + verbose (bool): Verbose mode. + context_kwargs: Extra arguments which are passed to the + `ResolvedContext` instances used to run the tests within. + Ignored if `use_current_env` is True. + """ + self.package_request = package_request + self.use_current_env = use_current_env + self.extra_package_requests = extra_package_requests + self.stdout = stdout or sys.stdout + self.stderr = stderr or sys.stderr + self.verbose = verbose + self.context_kwargs = context_kwargs + + self.package_paths = (config.packages_path if package_paths is None + else package_paths) + + self.package = None + self.contexts = {} + + def get_package(self): + """Get the target package. + + Returns: + `Package`: Package to run tests on. + """ + if self.package is not None: + return self.package + + if self.use_current_env: + pass + else: + package = get_latest_package_from_string(str(self.package_request), + self.package_paths) + if package is None: + raise PackageNotFoundError("Could not find package to test - %s" + % str(self.package_request)) + + self.package = package + return self.package + + def get_test_names(self): + """Get the names of tests in this package. + + Returns: + List of str: Test names. + """ + package = self.get_package() + return sorted((package.tests or {}).keys()) + + def can_run_test(self, test_name): + """See if test can be run. 
+ + The only time a test cannot be run is when `self.use_current_env` is + True, and the current env does not have the necessary requirements. + + Returns: + 2-tuple: + - bool: True if test can be run; + - str: Description of why test cannot be run (or empty string). + """ + if not self.use_current_env: + return True, '' + + return False, "TODO" + + def run_test(self, test_name): + """Run a test. + + Runs the test in its correct environment. Note that if tests share the + same requirements, the contexts will be reused. + + Returns: + subprocess.Popen: Test process. + """ + package = self.get_package() + test_info = self._get_test_info(test_name) + command = test_info["command"] + requires = test_info["requires"] + + def print_header(txt, *nargs): + pr = Printer(sys.stdout) + pr(txt % nargs, heading) + + def print_command_header(): + if self.verbose: + if isinstance(command, basestring): + cmd_str = command + else: + cmd_str = ' '.join(map(quote, command)) + + print_header("\n\nRunning test '%s'\nCommand: %s\n", + test_name, cmd_str) + + def expand_command(context, command): + variant = context.get_resolved_package(package.name) + if isinstance(command, basestring): + return variant.format(command) + else: + return map(variant.format, command) + + if self.use_current_env: + can_run, descr = self.can_run_test(test_name) + if not can_run: + raise PackageTestError( + "Cannot run test '%s' of package %s in the current " + "environment: %s" % (test_name, package.uri, descr)) + + context = ResolvedContext.get_current() + command = expand_command(context, command) + + print_command_header() + + # run directly as subprocess + p = subprocess.Popen(command, shell=isinstance(command, basestring), + stdout=self.stdout, stderr=self.stderr) + return p + + # create/reuse context to run test within + key = tuple(requires) + context = self.contexts.get(key) + + if context is None: + if self.verbose: + print_header("\nResolving environment for test '%s': %s\n%s\n", + test_name, ' 
'.join(map(quote, requires)), '-' * 80) + + context = ResolvedContext(package_requests=requires, + package_paths=self.package_paths, + buf=self.stdout, + **self.context_kwargs) + + if not context.success: + context.print_info(buf=self.stderr) + raise PackageTestError( + "Cannot run test '%s' of package %s: the environment " + "failed to resolve" % (test_name, package.uri)) + + self.contexts[key] = context + + command = expand_command(context, command) + + if self.verbose: + context.print_info(self.stdout) + print_command_header() + + return context.execute_shell(command=command, + stdout=self.stdout, + stderr=self.stderr) + + def _get_test_info(self, test_name): + package = self.get_package() + + tests_dict = package.tests or {} + test_entry = tests_dict.get(test_name) + + if not test_entry: + raise PackageTestError("Test '%s' not found in package %s" + % (test_name, package.uri)) + + if not isinstance(test_entry, dict): + test_entry = { + "command": test_entry + } + + # construct env request + requires = [] + + if len(package.version): + req = "%s==%s" % (package.name, str(package.version)) + requires.append(req) + else: + requires.append(str(package)) + + reqs = test_entry.get("requires") or [] + requires.extend(reqs) + + if self.extra_package_requests: + reqs = map(str, self.extra_package_requests) + requires.extend(reqs) + + return { + "command": test_entry["command"], + "requires": requires + } diff --git a/src/rez/packages_.py b/src/rez/packages_.py index 6dd176e8e..92efed142 100644 --- a/src/rez/packages_.py +++ b/src/rez/packages_.py @@ -677,10 +677,33 @@ def get_latest_package(name, range_=None, paths=None, error=False): return max(it, key=lambda x: x.version) except ValueError: # empty sequence if error: + # FIXME this isn't correct, since the pkg fam may exist but a pkg + # in the range does not. 
raise PackageFamilyNotFoundError("No such package family %r" % name) return None +def get_latest_package_from_string(txt, paths=None, error=False): + """Get the latest package found within the given request string. + + Args: + txt (str): Request, eg 'foo-1.2+' + paths (list of str, optional): paths to search for package families, + defaults to `config.packages_path`. + error (bool): If True, raise an error if no package is found. + + Returns: + `Package` object, or None if no package is found. + """ + from rez.utils.formatting import PackageRequest + + req = PackageRequest(txt) + return get_latest_package(name=req.name, + range_=req.range_, + paths=paths, + error=error) + + def _get_families(name, paths=None): entries = [] for path in (paths or config.packages_path): diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index ab9d79d11..fa692a159 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -647,7 +647,7 @@ def print_info(self, buf=sys.stdout, verbosity=0, source_order=False, buf (file-like object): Where to print this info to. verbosity (bool): Verbose mode. source_order (bool): If True, print resolved packages in the order - they are sources, rather than alphabetical order. + they are sourced, rather than alphabetical order. show_resolved_uris (bool): By default, resolved packages have their 'root' property listed, or their 'uri' if 'root' is None. Use this option to list 'uri' regardless. 
diff --git a/src/rez/utils/formatting.py b/src/rez/utils/formatting.py index 1a46e353e..370cf76da 100644 --- a/src/rez/utils/formatting.py +++ b/src/rez/utils/formatting.py @@ -43,7 +43,7 @@ class PackageRequest(Requirement): Example: - >>> pr = PackageRequirement("foo-1.3+") + >>> pr = PackageRequest("foo-1.3+") >>> print pr.name, pr.range foo 1.3+ """ From 3afad298009b4d9b44fa26ac50088c2896c67080 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Mar 2017 08:44:29 +1100 Subject: [PATCH 049/124] -imported modules in package.py now stripped from installed package.py -updated some tests to check behavior when imports in package.py --- src/rez/serialise.py | 17 +++++++++++++++-- .../tests/data/builds/packages/floob/package.py | 4 ++++ .../data/packages/developer_dynamic/package.py | 7 +++++++ src/rez/utils/_version.py | 2 +- 4 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/rez/serialise.py b/src/rez/serialise.py index fb4eca0da..a885b0787 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -13,7 +13,7 @@ from rez.vendor.enum import Enum from rez.vendor import yaml from contextlib import contextmanager -from inspect import isfunction, getargspec +from inspect import isfunction, ismodule, getargspec from StringIO import StringIO import sys import os @@ -172,10 +172,17 @@ def load_py(stream, filepath=None): def process_python_objects(data, filepath=None): + _remove = object() + def _process(value): if isinstance(value, dict): for k, v in value.items(): - value[k] = _process(v) + new_value = _process(v) + + if new_value is _remove: + del value[k] + else: + value[k] = new_value return value elif isfunction(value): @@ -211,6 +218,12 @@ def _process(value): return SourceCode(func=value, filepath=filepath, eval_as_function=as_function) + elif ismodule(value): + # modules cannot be installed as package attributes. 
They are present + # in developer packages sometimes though - it's fine for a package + # attribute to use an imported module at build time. + # + return _remove else: return value diff --git a/src/rez/tests/data/builds/packages/floob/package.py b/src/rez/tests/data/builds/packages/floob/package.py index 1cdbc5e77..d45edca91 100644 --- a/src/rez/tests/data/builds/packages/floob/package.py +++ b/src/rez/tests/data/builds/packages/floob/package.py @@ -1,3 +1,7 @@ + +# make sure imported modules don't break package installs +import os + name = 'floob' version = '1.2.0' authors = ["joe.bloggs"] diff --git a/src/rez/tests/data/packages/developer_dynamic/package.py b/src/rez/tests/data/packages/developer_dynamic/package.py index 4fd493e72..8223a0fbd 100644 --- a/src/rez/tests/data/packages/developer_dynamic/package.py +++ b/src/rez/tests/data/packages/developer_dynamic/package.py @@ -1,3 +1,4 @@ + name = "developer_dynamic" @early() @@ -11,3 +12,9 @@ def description(): def preprocess(this, data): from early_utils import get_authors data["authors"] = get_authors() + +# make sure imported modules don't break developer packages +import sys + +# make sure attribute can use imported module +built_on = sys.platform diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 848003c45..ccda3f533 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.6.0" +_rez_version = "2.6.1" # Copyright 2013-2016 Allan Johns. 
From f57214ec35e8ee5c05e9639053802cb9137e9faa Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Mar 2017 08:59:38 +1100 Subject: [PATCH 050/124] -version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index ccda3f533..3cc77bd17 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.6.1" +_rez_version = "2.6.2" # Copyright 2013-2016 Allan Johns. From 4f2c2855544664f2569f4de7e2eac8ebb2cef9c5 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Mar 2017 12:16:13 +1100 Subject: [PATCH 051/124] -first pass rez-test support -doesn't iterate variants yet (have to wait for explicit var req feature) --- bin/rez-test | 3 + src/rez/cli/test.py | 7 +- src/rez/package_maker__.py | 2 +- src/rez/package_resources_.py | 4 +- src/rez/package_serialise.py | 14 ++- src/rez/package_test.py | 170 ++++++++++++++++++---------------- 6 files changed, 111 insertions(+), 89 deletions(-) create mode 100644 bin/rez-test diff --git a/bin/rez-test b/bin/rez-test new file mode 100644 index 000000000..06f984b24 --- /dev/null +++ b/bin/rez-test @@ -0,0 +1,3 @@ +#!/usr/bin/env python +from rez.cli._main import run +run("test") diff --git a/src/rez/cli/test.py b/src/rez/cli/test.py index a1ad96b8c..9befaab88 100644 --- a/src/rez/cli/test.py +++ b/src/rez/cli/test.py @@ -42,8 +42,7 @@ def command(opts, parser, extra_arg_groups=None): run_test_names = test_names for test_name in run_test_names: - proc = runner.run_test(test_name) - proc.wait() + returncode = runner.run_test(test_name) - if proc.returncode: - sys.exit(proc.returncode) + if returncode: + sys.exit(returncode) diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 533d7cd94..86ecd3b73 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -51,7 +51,7 @@ 
Optional('tools'): late_bound([basestring]), Optional('help'): late_bound(help_schema), - Optional('tests'): tests_schema, + Optional('tests'): late_bound(tests_schema), Optional('pre_commands'): _commands_schema, Optional('commands'): _commands_schema, diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 787ad8be9..92324ec28 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -112,7 +112,7 @@ def late_bound(schema): Optional('help'): late_bound(help_schema), # testing - Optional('tests'): tests_schema, + Optional('tests'): late_bound(tests_schema), # commands Optional('pre_commands'): SourceCode, @@ -198,7 +198,7 @@ def late_bound(schema): Optional('tools'): late_bound([basestring]), Optional('help'): late_bound(help_schema), - Optional('tests'): tests_schema, + Optional('tests'): late_bound(tests_schema), Optional('pre_commands'): _commands_schema, Optional('commands'): _commands_schema, diff --git a/src/rez/package_serialise.py b/src/rez/package_serialise.py index 296ce23c3..fff332bbd 100644 --- a/src/rez/package_serialise.py +++ b/src/rez/package_serialise.py @@ -41,12 +41,20 @@ version_schema = Or(basestring, And(Version, Use(str))) - package_request_schema = Or(basestring, And(PackageRequest, Use(str))) - source_code_schema = Or(SourceCode, And(basestring, Use(SourceCode))) +tests_schema = Schema({ + Optional(basestring): Or( + Or(basestring, [basestring]), + { + "command": Or(basestring, [basestring]), + Optional("requires"): [package_request_schema] + } + ) +}) + # package serialisation schema package_serialise_schema = Schema({ @@ -70,6 +78,8 @@ Optional("uuid"): basestring, Optional("config"): dict, + Optional('tests'): late_bound(tests_schema), + Optional("timestamp"): int, Optional('revision'): object, Optional('changelog'): basestring, diff --git a/src/rez/package_test.py b/src/rez/package_test.py index 337d04f33..f9bd42ffd 100644 --- a/src/rez/package_test.py +++ b/src/rez/package_test.py @@ 
-35,6 +35,16 @@ class PackageTestRunner(object): Commands can also be a list - in this case, the test process is launched directly, rather than interpreted via a shell. + + TODO FIXME: Currently a test will not be run over the variants of a + package. This is because there is no reliable way to resolve to a specific + variant's env - we can influence the variant chosen, knowing how the + variant selection mode works, but this is not a guarantee and it would + be error-prone and complicated to do it this way. For reasons beyond + package testing, we want to be able to explicitly specify a variant to + resolve to anyway, so this will be fixed in a separate feature. Once that + is available, this code will be updated to iterate over a package's + variants and run tests in each. """ def __init__(self, package_request, use_current_env=False, extra_package_requests=None, package_paths=None, stdout=None, @@ -71,6 +81,9 @@ def __init__(self, package_request, use_current_env=False, self.package = None self.contexts = {} + if use_current_env: + raise NotImplementedError + def get_package(self): """Get the target package. @@ -101,115 +114,112 @@ def get_test_names(self): package = self.get_package() return sorted((package.tests or {}).keys()) - def can_run_test(self, test_name): - """See if test can be run. - - The only time a test cannot be run is when `self.use_current_env` is - True, and the current env does not have the necessary requirements. - - Returns: - 2-tuple: - - bool: True if test can be run; - - str: Description of why test cannot be run (or empty string). - """ - if not self.use_current_env: - return True, '' - - return False, "TODO" - def run_test(self, test_name): """Run a test. Runs the test in its correct environment. Note that if tests share the same requirements, the contexts will be reused. + TODO: If the package had variants, the test will be run for each + variant. + Returns: - subprocess.Popen: Test process. 
+ int: Returncode - zero if all test(s) passed, otherwise the return + code of the failed test. """ - package = self.get_package() - test_info = self._get_test_info(test_name) - command = test_info["command"] - requires = test_info["requires"] - def print_header(txt, *nargs): pr = Printer(sys.stdout) pr(txt % nargs, heading) - def print_command_header(): - if self.verbose: - if isinstance(command, basestring): - cmd_str = command - else: - cmd_str = ' '.join(map(quote, command)) + package = self.get_package() - print_header("\n\nRunning test '%s'\nCommand: %s\n", - test_name, cmd_str) + if test_name not in self.get_test_names(): + raise PackageTestError("Test '%s' not found in package %s" + % (test_name, package.uri)) + + if self.use_current_env: + return self._run_test_in_current_env(test_name) + + for variant in package.iter_variants(): + + # get test info for this variant. If None, that just means that this + # variant doesn't provide this test. That's ok - 'tests' might be + # implemented as a late function attribute that provides some tests + # for some variants and not others + # + test_info = self._get_test_info(test_name, variant) + if not test_info: + continue - def expand_command(context, command): - variant = context.get_resolved_package(package.name) + command = test_info["command"] + requires = test_info["requires"] + + # expand refs like {root} in commands if isinstance(command, basestring): - return variant.format(command) + command = variant.format(command) else: - return map(variant.format, command) + command = map(variant.format, command) - if self.use_current_env: - can_run, descr = self.can_run_test(test_name) - if not can_run: - raise PackageTestError( - "Cannot run test '%s' of package %s in the current " - "environment: %s" % (test_name, package.uri, descr)) + # show progress + if self.verbose: + print_header( + "\nTest: %s\nPackage: %s\n%s\n", + test_name, variant.uri, '-' * 80) - context = ResolvedContext.get_current() - command = 
expand_command(context, command) + # create test env + key = tuple(requires) + context = self.contexts.get(key) - print_command_header() + if context is None: + if self.verbose: + print_header("Resolving test environment: %s\n", + ' '.join(map(quote, requires))) - # run directly as subprocess - p = subprocess.Popen(command, shell=isinstance(command, basestring), - stdout=self.stdout, stderr=self.stderr) - return p + context = ResolvedContext(package_requests=requires, + package_paths=self.package_paths, + buf=self.stdout, + **self.context_kwargs) - # create/reuse context to run test within - key = tuple(requires) - context = self.contexts.get(key) + if not context.success: + context.print_info(buf=self.stderr) + raise PackageTestError( + "Cannot run test '%s' of package %s: the environment " + "failed to resolve" % (test_name, variant.uri)) - if context is None: - if self.verbose: - print_header("\nResolving environment for test '%s': %s\n%s\n", - test_name, ' '.join(map(quote, requires)), '-' * 80) + self.contexts[key] = context - context = ResolvedContext(package_requests=requires, - package_paths=self.package_paths, - buf=self.stdout, - **self.context_kwargs) + # run the test in the context + if self.verbose: + context.print_info(self.stdout) - if not context.success: - context.print_info(buf=self.stderr) - raise PackageTestError( - "Cannot run test '%s' of package %s: the environment " - "failed to resolve" % (test_name, package.uri)) + if isinstance(command, basestring): + cmd_str = command + else: + cmd_str = ' '.join(map(quote, command)) - self.contexts[key] = context + print_header("\nRunning test command: %s\n" % cmd_str) - command = expand_command(context, command) + retcode, _, _ = context.execute_shell( + command=command, + stdout=self.stdout, + stderr=self.stderr, + block=True) - if self.verbose: - context.print_info(self.stdout) - print_command_header() + if retcode: + return retcode - return context.execute_shell(command=command, - 
stdout=self.stdout, - stderr=self.stderr) + # TODO FIXME we don't iterate over all variants yet, because we + # can't reliably do that (see class docstring) + break - def _get_test_info(self, test_name): - package = self.get_package() + return 0 # success - tests_dict = package.tests or {} + def _get_test_info(self, test_name, variant): + tests_dict = variant.tests or {} test_entry = tests_dict.get(test_name) if not test_entry: - raise PackageTestError("Test '%s' not found in package %s" - % (test_name, package.uri)) + return None if not isinstance(test_entry, dict): test_entry = { @@ -219,14 +229,14 @@ def _get_test_info(self, test_name): # construct env request requires = [] - if len(package.version): - req = "%s==%s" % (package.name, str(package.version)) + if len(variant.version): + req = "%s==%s" % (variant.name, str(variant.version)) requires.append(req) else: - requires.append(str(package)) + requires.append(variant.name) reqs = test_entry.get("requires") or [] - requires.extend(reqs) + requires.extend(map(str, reqs)) if self.extra_package_requests: reqs = map(str, self.extra_package_requests) From 27021aadb3707bc1508633fca507a35f3eecb9ba Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Mar 2017 14:34:06 +1100 Subject: [PATCH 052/124] -added cli args -added timestamp so tests in runner dont pick up newer packages --- src/rez/cli/test.py | 20 ++++++++++++++++++++ src/rez/package_test.py | 8 +++++++- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/src/rez/cli/test.py b/src/rez/cli/test.py index 9befaab88..2a0628049 100644 --- a/src/rez/cli/test.py +++ b/src/rez/cli/test.py @@ -7,6 +7,15 @@ def setup_parser(parser, completions=False): parser.add_argument( "-l", "--list", action="store_true", help="list package's tests and exit") + parser.add_argument( + "--paths", type=str, default=None, + help="set package search path") + parser.add_argument( + "--nl", "--no-local", dest="no_local", action="store_true", + help="don't load local packages") + 
PKG_action = parser.add_argument( + "--extra-packages", nargs='+', metavar="PKG", + help="extra packages to add to test environment") PKG_action = parser.add_argument( "PKG", help="package run tests on") @@ -21,9 +30,20 @@ def setup_parser(parser, completions=False): def command(opts, parser, extra_arg_groups=None): from rez.package_test import PackageTestRunner + from rez.config import config + import os.path import sys + if opts.paths is None: + pkg_paths = (config.nonlocal_packages_path + if opts.no_local else None) + else: + pkg_paths = opts.paths.split(os.pathsep) + pkg_paths = [os.path.expanduser(x) for x in pkg_paths if x] + runner = PackageTestRunner(package_request=opts.PKG, + package_paths=pkg_paths, + extra_package_requests=opts.extra_packages, verbose=True) test_names = runner.get_test_names() diff --git a/src/rez/package_test.py b/src/rez/package_test.py index f9bd42ffd..8620e2312 100644 --- a/src/rez/package_test.py +++ b/src/rez/package_test.py @@ -5,6 +5,7 @@ from rez.utils.colorize import heading, Printer from pipes import quote import subprocess +import time import sys @@ -81,6 +82,10 @@ def __init__(self, package_request, use_current_env=False, self.package = None self.contexts = {} + # use a common timestamp across all tests - this ensures that tests + # don't pick up new packages halfway through (ie from one test to another) + self.timestamp = int(time.time()) + if use_current_env: raise NotImplementedError @@ -99,7 +104,7 @@ def get_package(self): package = get_latest_package_from_string(str(self.package_request), self.package_paths) if package is None: - raise PackageNotFoundError("Could not find package to test - %s" + raise PackageNotFoundError("Could not find package to test: %s" % str(self.package_request)) self.package = package @@ -178,6 +183,7 @@ def print_header(txt, *nargs): context = ResolvedContext(package_requests=requires, package_paths=self.package_paths, buf=self.stdout, + timestamp=self.timestamp, **self.context_kwargs) if not 
context.success: From 47257b45802df43835dead3546ab091b3eeb0462 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 4 Mar 2017 14:35:10 +1100 Subject: [PATCH 053/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 3cc77bd17..277fbf5f8 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.6.2" +_rez_version = "2.7.0" # Copyright 2013-2016 Allan Johns. From aa819e8e1fad12c67a8f9c6244da8be80b77ba57 Mon Sep 17 00:00:00 2001 From: ajohns Date: Thu, 9 Mar 2017 15:42:51 +1100 Subject: [PATCH 054/124] -'build_commands' support added --- src/rez/build_system.py | 31 +++- src/rez/cli/build.py | 22 +-- src/rez/developer_package.py | 7 + src/rez/package_maker__.py | 2 + src/rez/package_resources_.py | 1 + src/rez/utils/_version.py | 2 +- src/rezplugins/build_system/bez.py | 34 +++-- src/rezplugins/build_system/cmake.py | 30 ++-- src/rezplugins/build_system/custom.py | 194 ++++++++++++++++++++++++++ 9 files changed, 287 insertions(+), 36 deletions(-) create mode 100644 src/rezplugins/build_system/custom.py diff --git a/src/rez/build_system.py b/src/rez/build_system.py index 8d1257baa..91869d4df 100644 --- a/src/rez/build_system.py +++ b/src/rez/build_system.py @@ -3,6 +3,7 @@ from rez.build_process_ import BuildType from rez.exceptions import BuildSystemError from rez.packages_ import get_developer_package +import os.path def get_buildsys_types(): @@ -95,6 +96,8 @@ def __init__(self, working_dir, opts=None, package=None, self.child_build_args = child_build_args self.verbose = verbose + self.opts = opts + @classmethod def is_valid_root(cls, path): """Return True if this build system can build the source in path.""" @@ -151,22 +154,26 @@ def build(self, context, variant, build_path, install_path, install=False, raise NotImplementedError 
@classmethod - def get_standard_vars(cls, context, variant, build_type, install): + def get_standard_vars(cls, context, variant, build_type, install, + build_path, install_path=None): """Returns a standard set of environment variables that can be set for the build system to use """ from rez.config import config package = variant.parent - vars = { + + vars_ = { 'REZ_BUILD_ENV': 1, + 'REZ_BUILD_PATH': build_path, 'REZ_BUILD_THREAD_COUNT': package.config.build_thread_count, 'REZ_BUILD_VARIANT_INDEX': variant.index or 0, 'REZ_BUILD_PROJECT_VERSION': str(package.version), 'REZ_BUILD_PROJECT_NAME': package.name, 'REZ_BUILD_PROJECT_DESCRIPTION': \ (package.description or '').strip(), - 'REZ_BUILD_PROJECT_FILE': getattr(package, 'filepath', ''), + 'REZ_BUILD_PROJECT_FILE': package.filepath, + 'REZ_BUILD_SOURCE_PATH': os.path.dirname(package.filepath), 'REZ_BUILD_REQUIRES': \ ' '.join(str(x) for x in context.requested_packages(True)), 'REZ_BUILD_REQUIRES_UNVERSIONED': \ @@ -175,19 +182,29 @@ def get_standard_vars(cls, context, variant, build_type, install): 'REZ_BUILD_INSTALL': 1 if install else 0, } + if install_path: + vars_['REZ_BUILD_INSTALL_PATH'] = install_path + if config.rez_1_environment_variables and \ not config.disable_rez_1_compatibility and \ build_type == BuildType.central: - vars['REZ_IN_REZ_RELEASE'] = 1 - return vars + vars_['REZ_IN_REZ_RELEASE'] = 1 + + return vars_ @classmethod def set_standard_vars(cls, executor, context, variant, build_type, - install): + install, build_path, install_path=None): """Sets a standard set of environment variables for the build system to use """ - vars = cls.get_standard_vars(context, variant, build_type, install) + vars = cls.get_standard_vars(context=context, + variant=variant, + build_type=build_type, + install=install, + build_path=build_path, + install_path=install_path) + for var, value in vars.iteritems(): executor.env[var] = value diff --git a/src/rez/cli/build.py b/src/rez/cli/build.py index 4c1988cc5..1b846994b 
100644 --- a/src/rez/cli/build.py +++ b/src/rez/cli/build.py @@ -16,14 +16,20 @@ def setup_parser_common(parser): # add build system option only if one build system is associated with cwd clss = get_valid_build_systems(os.getcwd()) - if len(clss) == 1: - cls = clss[0] - cls.bind_cli(parser) - elif clss: - types = [x.name() for x in clss] - parser.add_argument( - "-b", "--build-system", dest="buildsys", type=str, choices=types, - help="the build system to use.") + + if clss: + # 'custom' takes precedence + if "custom" in [x.name() for x in clss]: + clss = [x for x in clss if x.name() == "custom"] + + if len(clss) == 1: + cls = clss[0] + cls.bind_cli(parser) + else: + types = [x.name() for x in clss] + parser.add_argument( + "-b", "--build-system", dest="buildsys", type=str, choices=types, + help="the build system to use.") parser.add_argument( "--variants", nargs='+', type=int, metavar="INDEX", diff --git a/src/rez/developer_package.py b/src/rez/developer_package.py index 14baceab6..ec8b6f17a 100644 --- a/src/rez/developer_package.py +++ b/src/rez/developer_package.py @@ -23,6 +23,13 @@ def __init__(self, resource): # include modules, derived from any present @include decorators self.includes = None + @property + def root(self): + if self.filepath: + return os.path.dirname(self.filepath) + else: + return None + @classmethod def from_path(cls, path): """Load a developer package. 
diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 86ecd3b73..5724b055a 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -57,6 +57,8 @@ Optional('commands'): _commands_schema, Optional('post_commands'): _commands_schema, + # attributes specific to pre-built packages + Optional("build_command"): Or([basestring], basestring), Optional("preprocess"): _function_schema, # arbitrary fields diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 92324ec28..123564027 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -26,6 +26,7 @@ # package attributes that we don't install package_build_only_keys = ( + "build_command", "preprocess", ) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 277fbf5f8..874b40f9d 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.7.0" +_rez_version = "2.8.0" # Copyright 2013-2016 Allan Johns. 
diff --git a/src/rezplugins/build_system/bez.py b/src/rezplugins/build_system/bez.py index ce6baabfd..355f82b2b 100644 --- a/src/rezplugins/build_system/bez.py +++ b/src/rezplugins/build_system/bez.py @@ -68,9 +68,10 @@ def build(self, context, variant, build_path, install_path, install=False, module=("build_system", "bez"), func_name="_FWD__spawn_build_shell", working_dir=self.working_dir, - build_dir=build_path, + build_path=build_path, variant_index=variant.index, - install=install) + install=install, + install_path=install_path) ret["success"] = True ret["build_env_script"] = build_env_script @@ -86,7 +87,9 @@ def build(self, context, variant, build_path, install_path, install=False, package=self.package, variant=variant, build_type=build_type, - install=install) + install=install, + build_path=build_path, + install_path=install_path) retcode, _, _ = context.execute_shell(command=cmd, block=True, @@ -97,13 +100,20 @@ def build(self, context, variant, build_path, install_path, install=False, @classmethod def _add_build_actions(cls, executor, context, package, variant, - build_type, install): - cls.set_standard_vars(executor, context, variant, build_type, install) - - -def _FWD__spawn_build_shell(working_dir, build_dir, variant_index, install): + build_type, install, build_path, install_path=None): + cls.set_standard_vars(executor=executor, + context=context, + variant=variant, + build_type=build_type, + install=install, + build_path=build_path, + install_path=install_path) + + +def _FWD__spawn_build_shell(working_dir, build_path, variant_index, install, + install_path=None): # This spawns a shell that the user can run 'bez' in directly - context = ResolvedContext.load(os.path.join(build_dir, "build.rxt")) + context = ResolvedContext.load(os.path.join(build_path, "build.rxt")) package = get_developer_package(working_dir) variant = package.get_variant(variant_index) config.override("prompt", "BUILD>") @@ -113,9 +123,11 @@ def _FWD__spawn_build_shell(working_dir, 
build_dir, variant_index, install): package=package, variant=variant, build_type=BuildType.local, - install=install) + install=install, + build_path=build_path, + install_path=install_path) - retcode, _, _ = context.execute_shell(block=True, cwd=build_dir, + retcode, _, _ = context.execute_shell(block=True, cwd=build_path, actions_callback=callback) sys.exit(retcode) diff --git a/src/rezplugins/build_system/cmake.py b/src/rezplugins/build_system/cmake.py index d569e3ebf..9dca87cc2 100644 --- a/src/rezplugins/build_system/cmake.py +++ b/src/rezplugins/build_system/cmake.py @@ -144,7 +144,9 @@ def _pr(s): package=self.package, variant=variant, build_type=build_type, - install=install) + install=install, + build_path=build_path, + install_path=install_path) # run the build command and capture/print stderr at the same time retcode, _, _ = context.execute_shell(command=cmd, @@ -164,9 +166,10 @@ def _pr(s): module=("build_system", "cmake"), func_name="_FWD__spawn_build_shell", working_dir=self.working_dir, - build_dir=build_path, + build_path=build_path, variant_index=variant.index, - install=install) + install=install, + install_path=install_path) ret["success"] = True ret["build_env_script"] = build_env_script return ret @@ -206,21 +209,28 @@ def _pr(s): @classmethod def _add_build_actions(cls, executor, context, package, variant, - build_type, install): + build_type, install, build_path, install_path=None): settings = package.config.plugins.build_system.cmake cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files") template_path = os.path.join(os.path.dirname(__file__), "template_files") - cls.set_standard_vars(executor, context, variant, build_type, install) + cls.set_standard_vars(executor=executor, + context=context, + variant=variant, + build_type=build_type, + install=install, + build_path=build_path, + install_path=install_path) executor.env.CMAKE_MODULE_PATH.append(cmake_path.replace('\\', '/')) executor.env.REZ_BUILD_DOXYFILE = 
os.path.join(template_path, 'Doxyfile') executor.env.REZ_BUILD_INSTALL_PYC = '1' if settings.install_pyc else '0' -def _FWD__spawn_build_shell(working_dir, build_dir, variant_index, install): +def _FWD__spawn_build_shell(working_dir, build_path, variant_index, install, + install_path=None): # This spawns a shell that the user can run 'make' in directly - context = ResolvedContext.load(os.path.join(build_dir, "build.rxt")) + context = ResolvedContext.load(os.path.join(build_path, "build.rxt")) package = get_developer_package(working_dir) variant = package.get_variant(variant_index) config.override("prompt", "BUILD>") @@ -230,10 +240,12 @@ def _FWD__spawn_build_shell(working_dir, build_dir, variant_index, install): package=package, variant=variant, build_type=BuildType.local, - install=install) + install=install, + build_path=build_path, + install_path=install_path) retcode, _, _ = context.execute_shell(block=True, - cwd=build_dir, + cwd=build_path, actions_callback=callback) sys.exit(retcode) diff --git a/src/rezplugins/build_system/custom.py b/src/rezplugins/build_system/custom.py new file mode 100644 index 000000000..dfcc8bd3a --- /dev/null +++ b/src/rezplugins/build_system/custom.py @@ -0,0 +1,194 @@ +""" +Package-defined build command +""" +from rez.build_system import BuildSystem +from rez.build_process_ import BuildType +from rez.packages_ import get_developer_package +from rez.exceptions import PackageMetadataError, BuildSystemError +from rez.utils.colorize import heading, Printer +from rez.utils.logging_ import print_warning +from pipes import quote +import functools +import os.path +import sys +import os + + +class CustomBuildSystem(BuildSystem): + """This build system runs the 'build_command' defined in a package.py. 
+ + For example, consider the package.py snippet: + + build_commands = "bash {root}/build.sh" + + This will run the given bash command in the build path - this is typically + located somewhere under the 'build' dir under the root dir containing the + package.py. The '{root}' string will expand to the source directory (the + one containing the package.py). + """ + @classmethod + def name(cls): + return "custom" + + @classmethod + def is_valid_root(cls, path): + try: + package = get_developer_package(path) + except PackageMetadataError: + return False + + return bool(getattr(package, "build_command", None)) + + def __init__(self, working_dir, opts=None, package=None, write_build_scripts=False, + verbose=False, build_args=[], child_build_args=[]): + super(CustomBuildSystem, self).__init__( + working_dir, + opts=opts, + package=package, + write_build_scripts=write_build_scripts, + verbose=verbose, + build_args=build_args, + child_build_args=child_build_args) + + @classmethod + def bind_cli(cls, parser): + """ + Uses a 'parse_build_args.py' file to add options, if found. + """ + try: + with open("./parse_build_args.py") as f: + source = f.read() + except Exception as e: + print str(e) + return + + # detect what extra args have been added + before_args = set(x.dest for x in parser._actions) + + try: + exec source in {"parser": parser} + except Exception as e: + print_warning("Error in ./parse_build_args.py: %s" % str(e)) + + after_args = set(x.dest for x in parser._actions) + extra_args = after_args - before_args + + # store extra args onto parser so we can get to it in self.build() + setattr(parser, "_rezbuild_extra_args", list(extra_args)) + + def build(self, context, variant, build_path, install_path, install=False, + build_type=BuildType.local): + """Perform the build. + + Note that most of the func args aren't used here - that's because this + info is already passed to the custom build command via environment + variables. 
+ """ + ret = {} + + if self.write_build_scripts: + # write out the script that places the user in a build env, where + # they can run bez directly themselves. + build_env_script = os.path.join(build_path, "build-env") + create_forwarding_script(build_env_script, + module=("build_system", "custom"), + func_name="_FWD__spawn_build_shell", + working_dir=self.working_dir, + build_path=build_path, + variant_index=variant.index, + install=install, + install_path=install_path) + + ret["success"] = True + ret["build_env_script"] = build_env_script + return ret + + # get build command + command = self.package.build_command + + if isinstance(command, basestring): + if self.build_args: + command = command + ' ' + ' '.join(map(quote, self.build_args)) + + command = self.package.format(command) + cmd_str = command + else: # list + command = command + self.build_args + command = map(self.package.format, command) + cmd_str = ' '.join(map(quote, command)) + + if self.verbose: + pr = Printer(sys.stdout) + pr("Running build command: %s" % cmd_str, heading) + + # run the build command + def _callback(executor): + self._add_build_actions(executor, + context=context, + package=self.package, + variant=variant, + build_type=build_type, + install=install, + build_path=build_path, + install_path=install_path) + + # write args defined in ./parse_build_args.py out as env vars + extra_args = getattr(self.opts.parser, "_rezbuild_extra_args", []) + + for key, value in vars(self.opts).iteritems(): + if key in extra_args: + varname = "__PARSE_ARG_%s" % key.upper() + + # do some value conversions + if isinstance(value, bool): + value = 1 if value else 0 + elif isinstance(value, (list, tuple)): + value = map(str, value) + value = map(quote, value) + value = ' '.join(value) + + executor.env[varname] = value + + retcode, _, _ = context.execute_shell(command=command, + block=True, + cwd=build_path, + actions_callback=_callback) + ret["success"] = (not retcode) + return ret + + @classmethod + def 
_add_build_actions(cls, executor, context, package, variant, + build_type, install, build_path, install_path=None): + cls.set_standard_vars(executor=executor, + context=context, + variant=variant, + build_type=build_type, + install=install, + build_path=build_path, + install_path=install_path) + + +def _FWD__spawn_build_shell(working_dir, build_path, variant_index, install, + install_path=None): + # This spawns a shell that the user can run 'bez' in directly + context = ResolvedContext.load(os.path.join(build_path, "build.rxt")) + package = get_developer_package(working_dir) + variant = package.get_variant(variant_index) + config.override("prompt", "BUILD>") + + callback = functools.partial(CustomBuildSystem._add_build_actions, + context=context, + package=package, + variant=variant, + build_type=BuildType.local, + install=install, + build_path=build_path, + install_path=install_path) + + retcode, _, _ = context.execute_shell(block=True, cwd=build_path, + actions_callback=callback) + sys.exit(retcode) + + +def register_plugin(): + return CustomBuildSystem From 6c70da1d39d8e3b3875cb9384c6f1e5e1e9f7f0c Mon Sep 17 00:00:00 2001 From: ajohns Date: Fri, 10 Mar 2017 14:11:05 +1100 Subject: [PATCH 055/124] -misc fixes for new custom_build command -added make-based build test using build_command --- setup.py | 8 +-- src/rez/build_system.py | 5 ++ src/rez/cli/build.py | 4 -- .../data/builds/packages/hello/1.0/Makefile | 8 +++ .../builds/packages/hello/1.0/lib/main.cpp | 8 +++ .../data/builds/packages/hello/1.0/package.py | 13 +++++ src/rez/tests/test_build.py | 19 ++++++- src/rez/tests/util.py | 56 ++++++++++++------- src/rezplugins/build_system/custom.py | 37 ++++++------ 9 files changed, 111 insertions(+), 47 deletions(-) create mode 100644 src/rez/tests/data/builds/packages/hello/1.0/Makefile create mode 100644 src/rez/tests/data/builds/packages/hello/1.0/lib/main.cpp create mode 100644 src/rez/tests/data/builds/packages/hello/1.0/package.py diff --git a/setup.py 
b/setup.py index 235b17833..8eb77414b 100644 --- a/setup.py +++ b/setup.py @@ -90,15 +90,15 @@ def find_files(pattern, path=None, root="rez"): 'rez': ['rezconfig', 'utils/logging.conf'] + ['README*'] + - find_files('*.*', 'completion') + - find_files('*.*', 'tests/data'), + find_files('*', 'completion') + + find_files('*', 'tests/data'), 'rezplugins': find_files('rezconfig', root='rezplugins') + find_files('*.cmake', 'build_system', root='rezplugins') + - find_files('*.*', 'build_system/template_files', root='rezplugins'), + find_files('*', 'build_system/template_files', root='rezplugins'), 'rezgui': find_files('rezguiconfig', root='rezgui') + - find_files('*.*', 'icons', root='rezgui') + find_files('*', 'icons', root='rezgui') }, classifiers = [ "Development Status :: 4 - Beta", diff --git a/src/rez/build_system.py b/src/rez/build_system.py index 91869d4df..5acfd5b91 100644 --- a/src/rez/build_system.py +++ b/src/rez/build_system.py @@ -21,6 +21,11 @@ def get_valid_build_systems(working_dir): cls = plugin_manager.get_plugin_class('build_system', buildsys_name) if cls.is_valid_root(working_dir): clss.append(cls) + + # explicit build command in package.py takes precedence + if "custom" in [x.name() for x in clss]: + clss = [x for x in clss if x.name() == "custom"] + return clss diff --git a/src/rez/cli/build.py b/src/rez/cli/build.py index 1b846994b..3b02e9b92 100644 --- a/src/rez/cli/build.py +++ b/src/rez/cli/build.py @@ -18,10 +18,6 @@ def setup_parser_common(parser): clss = get_valid_build_systems(os.getcwd()) if clss: - # 'custom' takes precedence - if "custom" in [x.name() for x in clss]: - clss = [x for x in clss if x.name() == "custom"] - if len(clss) == 1: cls = clss[0] cls.bind_cli(parser) diff --git a/src/rez/tests/data/builds/packages/hello/1.0/Makefile b/src/rez/tests/data/builds/packages/hello/1.0/Makefile new file mode 100644 index 000000000..7ffb0f12d --- /dev/null +++ b/src/rez/tests/data/builds/packages/hello/1.0/Makefile @@ -0,0 +1,8 @@ + +hai: 
${REZ_BUILD_SOURCE_PATH}/lib/main.cpp + g++ -o hai ${REZ_BUILD_SOURCE_PATH}/lib/main.cpp + +.PHONY: install +install: hai + mkdir -p ${REZ_BUILD_INSTALL_PATH}/bin + cp $< ${REZ_BUILD_INSTALL_PATH}/bin/hai diff --git a/src/rez/tests/data/builds/packages/hello/1.0/lib/main.cpp b/src/rez/tests/data/builds/packages/hello/1.0/lib/main.cpp new file mode 100644 index 000000000..d54910611 --- /dev/null +++ b/src/rez/tests/data/builds/packages/hello/1.0/lib/main.cpp @@ -0,0 +1,8 @@ +#include + + +int main(int argc, char** argv) +{ + std::cout << "Oh hai!" << std::endl; + return 0; +} diff --git a/src/rez/tests/data/builds/packages/hello/1.0/package.py b/src/rez/tests/data/builds/packages/hello/1.0/package.py new file mode 100644 index 000000000..f144ce5c3 --- /dev/null +++ b/src/rez/tests/data/builds/packages/hello/1.0/package.py @@ -0,0 +1,13 @@ + +name = 'hello' +version = '1.0' +authors = ["dr.foo"] +uuid = "110c80c135c142479d47e756bdbbddf8" +description = "A very simple C++ project." + +tools = ['hai'] + +build_command = "make -f {root}/Makefile {install}" + +def commands(): + env.PATH.append('{root}/bin') diff --git a/src/rez/tests/test_build.py b/src/rez/tests/test_build.py index 103e8e107..8ca546fb9 100644 --- a/src/rez/tests/test_build.py +++ b/src/rez/tests/test_build.py @@ -8,7 +8,7 @@ PackageFamilyNotFoundError import rez.vendor.unittest2 as unittest from rez.tests.util import TestBase, TempdirMixin, find_file_in_path, \ - shell_dependent, install_dependent, cmake_dependent + shell_dependent, install_dependent, program_dependent import shutil import os.path @@ -144,13 +144,26 @@ def test_builds_anti(self): self._test_build_floob() self._test_build_anti() - @cmake_dependent + @program_dependent("cmake") def test_build_cmake(self): + """Test a cmake-based package.""" self.assertRaises(PackageFamilyNotFoundError, self._create_context, - "sup_world==3.8") + "sup_world==3.8") self._test_build_translate_lib() self._test_build_sup_world() + @program_dependent("make", 
"g++") + def test_build_custom(self): + """Test a make-based package that uses the custom_build attribute.""" + from subprocess import PIPE + + self._test_build("hello", "1.0") + context = self._create_context("hello==1.0") + + proc = context.execute_command(['hai'], stdout=PIPE) + stdout = proc.communicate()[0] + self.assertEqual('Oh hai!', stdout.strip()) + if __name__ == '__main__': unittest.main() diff --git a/src/rez/tests/util.py b/src/rez/tests/util.py index d005866f1..968393fbc 100644 --- a/src/rez/tests/util.py +++ b/src/rez/tests/util.py @@ -83,8 +83,10 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - if os.path.exists(cls.root): - shutil.rmtree(cls.root) + if not os.getenv("REZ_KEEP_TMPDIRS"): + if os.path.exists(cls.root): + shutil.rmtree(cls.root) + def find_file_in_path(to_find, path_str, pathsep=None, reverse=True): """Attempts to find the given relative path to_find in the given path @@ -100,30 +102,44 @@ def find_file_in_path(to_find, path_str, pathsep=None, reverse=True): return test_path return None -_CMAKE_EXISTS = None -def cmake_exists(): - """Tests whether cmake is available""" - global _CMAKE_EXISTS - if _CMAKE_EXISTS is None: - import subprocess - import errno +program_tests = { + "cmake": ['cmake', '-h'], + "make": ['make', '-h'], + "g++": ["g++", "--help"] +} + + +def program_dependent(program_name, *program_names): + + # test if program exists + import subprocess + import errno + + def _test(name): + command = program_tests[name] with open(os.devnull, 'wb') as DEVNULL: try: - subprocess.check_call(['cmake', '-h'], stdout=DEVNULL, - stderr=DEVNULL) + subprocess.check_call(command, stdout=DEVNULL, stderr=DEVNULL) except (OSError, IOError, subprocess.CalledProcessError): - _CMAKE_EXISTS = False + return False else: - _CMAKE_EXISTS = True - return _CMAKE_EXISTS - -def cmake_dependent(fn): - """Function decorator that skips the test if cmake is not available""" - if not cmake_exists(): - return unittest.skip('cmake not 
available')(fn) - return fn + return True + + names = [program_name] + list(program_names) + exists = all(_test(x) for x in names) + + if exists: + def wrapper(fn): + return fn + + else: + def wrapper(fn): + return unittest.skip("Program(s) not available: %s" % names)(fn) + + return wrapper + def shell_dependent(exclude=None): """Function decorator that runs the function over all shell types.""" diff --git a/src/rezplugins/build_system/custom.py b/src/rezplugins/build_system/custom.py index dfcc8bd3a..160ac4af6 100644 --- a/src/rezplugins/build_system/custom.py +++ b/src/rezplugins/build_system/custom.py @@ -59,7 +59,6 @@ def bind_cli(cls, parser): with open("./parse_build_args.py") as f: source = f.read() except Exception as e: - print str(e) return # detect what extra args have been added @@ -106,15 +105,20 @@ def build(self, context, variant, build_path, install_path, install=False, # get build command command = self.package.build_command + def expand(txt): + root = self.package.root + install_ = "install" if install else '' + return txt.format(root=root, install=install_).strip() + if isinstance(command, basestring): if self.build_args: command = command + ' ' + ' '.join(map(quote, self.build_args)) - command = self.package.format(command) + command = expand(command) cmd_str = command else: # list command = command + self.build_args - command = map(self.package.format, command) + command = map(expand, command) cmd_str = ' '.join(map(quote, command)) if self.verbose: @@ -132,22 +136,23 @@ def _callback(executor): build_path=build_path, install_path=install_path) - # write args defined in ./parse_build_args.py out as env vars - extra_args = getattr(self.opts.parser, "_rezbuild_extra_args", []) + if self.opts: + # write args defined in ./parse_build_args.py out as env vars + extra_args = getattr(self.opts.parser, "_rezbuild_extra_args", []) - for key, value in vars(self.opts).iteritems(): - if key in extra_args: - varname = "__PARSE_ARG_%s" % key.upper() + for 
key, value in vars(self.opts).iteritems(): + if key in extra_args: + varname = "__PARSE_ARG_%s" % key.upper() - # do some value conversions - if isinstance(value, bool): - value = 1 if value else 0 - elif isinstance(value, (list, tuple)): - value = map(str, value) - value = map(quote, value) - value = ' '.join(value) + # do some value conversions + if isinstance(value, bool): + value = 1 if value else 0 + elif isinstance(value, (list, tuple)): + value = map(str, value) + value = map(quote, value) + value = ' '.join(value) - executor.env[varname] = value + executor.env[varname] = value retcode, _, _ = context.execute_shell(command=command, block=True, From d5606da00a8d9b4a011d359cca1cc970d894f563 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 11 Mar 2017 15:01:43 +1100 Subject: [PATCH 056/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 874b40f9d..f77d0e063 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.8.0" +_rez_version = "2.9.0" # Copyright 2013-2016 Allan Johns. 
From 8ab0d74f12019e4daf6851a5cd361a72f07bbc53 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 11 Mar 2017 15:32:08 +1100 Subject: [PATCH 057/124] -added 'isolate' flag so we have more control over whether globals dict is changed or not --- src/rez/resolved_context.py | 2 +- src/rez/rex.py | 28 +++++++++++++++++++--------- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index fa692a159..52abbe500 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1558,7 +1558,7 @@ def _minor_heading(txt): commands.set_package(pkg) try: - executor.execute_code(commands) + executor.execute_code(commands, isolate=True) except error_class as e: exc = e diff --git a/src/rez/rex.py b/src/rez/rex.py index 9c44eed1a..899b24724 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -1168,27 +1168,37 @@ def compile_code(cls, code, filename=None, exec_namespace=None): return pyc - def execute_code(self, code, filename=None): + def execute_code(self, code, filename=None, isolate=False): """Execute code within the execution context. Args: code (str or SourceCode): Rex code to execute. filename (str): Filename to report if there are syntax errors. + isolate (bool): If True, do not affect `self.globals` by executing + this code. """ + def _apply(): + self.compile_code(code=code, + filename=filename, + exec_namespace=self.globals) + # we want to execute the code using self.globals - if for no other # reason that self.formatter is pointing at self.globals, so if we # passed in a copy, we would also need to make self.formatter "look" at # the same copy - but we don't want to "pollute" our namespace, because # the same executor may be used to run multiple packages. 
Therefore, # we save a copy of self.globals before execution, and restore it after - saved_globals = dict(self.globals) - try: - self.compile_code(code=code, - filename=filename, - exec_namespace=self.globals) - finally: - self.globals.clear() - self.globals.update(saved_globals) + # + if isolate: + saved_globals = dict(self.globals) + + try: + _apply() + finally: + self.globals.clear() + self.globals.update(saved_globals) + else: + _apply() def execute_function(self, func, *nargs, **kwargs): """ From 0f8eb53371e13dce6e64ea48534ac90e1e948718 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 11 Mar 2017 15:33:02 +1100 Subject: [PATCH 058/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index f77d0e063..02f470b2f 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.9.0" +_rez_version = "2.9.1" # Copyright 2013-2016 Allan Johns. From 8c5b81895c36d5c78e7ac23a0f8037e5e49cd100 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 11 Mar 2017 15:59:17 +1100 Subject: [PATCH 059/124] minor docstring changes in rezconfig.py only. --- src/rez/rezconfig.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index 9130bf9ba..083a9afdb 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -2,6 +2,7 @@ Rez configuration settings. Do not change this file. Settings are determined in the following way: + 1) The setting is first read from this file; 2) The setting is then overridden if it is present in another settings file pointed at by the $REZ_CONFIG_FILE environment variable; @@ -28,6 +29,9 @@ Paths should use the path separator appropriate for the operating system (based on Python's os.path.sep). So for Linux paths, / should be used. 
On Windows \ (unescaped) should be used. + +Note: The comments in this file are extracted and turned into Wiki content. Pay +attention to the comment formatting and follow the existing syle closely. """ import os @@ -667,13 +671,16 @@ # style: dim, normal, bright # Enables/disables colorization globally. -# Note: Turned off for Windows currently as there seems to be a problem with -# the Colorama module. +# +# > [[media/icons/warning.png]] Note: Turned off for Windows currently as there seems +# > to be a problem with the Colorama module. +# # May also set to the string "force", which will make rez output color styling # information, even if the the output streams are not ttys. Useful if you are # piping the output of rez, but will eventually be printing to a tty later. -# When force is used, will generally be set through an environemnt variable, ie, -# echo $(REZ_COLOR_ENABLED=force python -c "from rez.utils.colorize import Printer, local; Printer()('foo', local)") +# When force is used, will generally be set through an environment variable, eg: +# +# echo $(REZ_COLOR_ENABLED=force python -c "from rez.utils.colorize import Printer, local; Printer()('foo', local)") color_enabled = (os.name == "posix") ### Do not move or delete this comment (__DOC_END__) From 74ec3e2a51d28001e08189c2b7f58d729a5062da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 14 Mar 2017 15:37:55 -0400 Subject: [PATCH 060/124] Fix/Enh: rez-env: allow shell from config file to take precedence over system one. 
--- src/rez/cli/env.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/cli/env.py b/src/rez/cli/env.py index 37628a175..a4f616494 100644 --- a/src/rez/cli/env.py +++ b/src/rez/cli/env.py @@ -13,7 +13,7 @@ def setup_parser(parser, completions=False): parser.add_argument( "--shell", dest="shell", type=str, choices=shells, - default=system.shell, + default=config.default_shell or system.shell, help="target shell type (default: %(default)s)") parser.add_argument( "--rcfile", type=str, From 996963f73cbc3a5969b18011a11a43c4409275a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20Starck?= Date: Tue, 7 Mar 2017 16:06:12 -0500 Subject: [PATCH 061/124] Enh/Fix: forgotten executable bit ? --- bin/rez-diff | 0 bin/rez-pip | 0 2 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 bin/rez-diff mode change 100644 => 100755 bin/rez-pip diff --git a/bin/rez-diff b/bin/rez-diff old mode 100644 new mode 100755 diff --git a/bin/rez-pip b/bin/rez-pip old mode 100644 new mode 100755 From 4a9f1814223ace9b6ac50a90924c76c90c0b49f0 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 18 Mar 2017 15:42:47 +1100 Subject: [PATCH 062/124] - reimplemented expand_requirement() to be more robust, allow any valid version range - exposed all variant attribs in rex binding object - expose 'this' in @early bound package functions - strip functions, leading-__ variables from package.py - added tests for expand_requirement() - added Version.as_tuple(), and matching unit test --- src/rez/developer_package.py | 7 +- src/rez/package_py_utils.py | 133 +++++++++++++++++++++----- src/rez/rex_bindings.py | 18 +++- src/rez/serialise.py | 109 ++++++++++++++++----- src/rez/tests/test_packages.py | 40 ++++++++ src/rez/utils/_version.py | 2 +- src/rez/vendor/version/requirement.py | 5 +- src/rez/vendor/version/test.py | 3 + src/rez/vendor/version/version.py | 67 ++++++++++--- 9 files changed, 314 insertions(+), 70 deletions(-) diff --git 
a/src/rez/developer_package.py b/src/rez/developer_package.py index ec8b6f17a..273293327 100644 --- a/src/rez/developer_package.py +++ b/src/rez/developer_package.py @@ -124,10 +124,9 @@ def _get_preprocessed(self, data): from copy import deepcopy with add_sys_paths(config.package_definition_build_python_paths): - preprocess = getattr(self, "preprocess", None) + preprocess_func = getattr(self, "preprocess", None) - if preprocess: - preprocess_func = preprocess.func + if preprocess_func: print_info("Applying preprocess from package.py") else: # load globally configured preprocess function @@ -173,7 +172,7 @@ def _get_preprocessed(self, data): % (e.__class__.__name__, str(e))) return None - # if preprocess added functions, these need to be converted to + # if preprocess added functions, these may need to be converted to # SourceCode instances preprocessed_data = process_python_objects(preprocessed_data) diff --git a/src/rez/package_py_utils.py b/src/rez/package_py_utils.py index a1066ee22..2bc95c88b 100644 --- a/src/rez/package_py_utils.py +++ b/src/rez/package_py_utils.py @@ -1,6 +1,6 @@ """ -This sourcefile is intended to only be imported in package.py files, in -functions including: +This sourcefile is intended to be imported in package.py files, in functions +including: - the special 'preprocess' function; - early bound functions that use the @early decorator. @@ -11,20 +11,36 @@ from rez.exceptions import InvalidPackageError -def expand_requirement(request): - """Expands a requirement string like 'python-2.*' +def expand_requirement(request, paths=None): + """Expands a requirement string like 'python-2.*', 'foo-2.*+<*', etc. - Only trailing wildcards are supported; they will be replaced with the - latest package version found within the range. If none are found, the - wildcards will just be stripped. + Wildcards are expanded to the latest version that matches. 
There is also a + special wildcard '**' that will expand to the full version, but it cannot + be used in combination with '*'. - Example: + Wildcards MUST placehold a whole version token, not partial - while 'foo-2.*' + is valid, 'foo-2.v*' is not. + + Wildcards MUST appear at the end of version numbers - while 'foo-1.*.*' is + valid, 'foo-1.*.0' is not. + + It is possible that an expansion will result in an invalid request string + (such as 'foo-2+<2'). The appropriate exception will be raised if this + happens. + + Examples: >>> print expand_requirement('python-2.*') python-2.7 + >>> print expand_requirement('python==2.**') + python==2.7.12 + >>> print expand_requirement('python<**') + python<3.0.5 Args: request (str): Request to expand, eg 'python-2.*' + paths (list of str, optional): paths to search for package families, + defaults to `config.packages_path`. Returns: str: Expanded request string. @@ -32,25 +48,96 @@ def expand_requirement(request): if '*' not in request: return request - from rez.vendor.version.requirement import VersionedObject, Requirement + from rez.vendor.version.version import VersionRange + from rez.vendor.version.requirement import Requirement from rez.packages_ import get_latest_package + from uuid import uuid4 - txt = request.replace('*', '_') - obj = VersionedObject(txt) - rank = len(obj.version) - + wildcard_map = {} + expanded_versions = {} request_ = request - while request_.endswith('*'): - request_ = request_[:-2] # strip sep + * - - req = Requirement(request_) - package = get_latest_package(name=req.name, range_=req.range_) - - if package is None: - return request_ - obj.version_ = package.version.trim(rank) - return str(obj) + # replace wildcards with valid version tokens that can be replaced again + # afterwards. This produces a horrendous, but both valid and temporary, + # version string. 
+ # + while "**" in request_: + uid = "_%s_" % uuid4().hex + request_ = request_.replace("**", uid, 1) + wildcard_map[uid] = "**" + + while '*' in request_: + uid = "_%s_" % uuid4().hex + request_ = request_.replace('*', uid, 1) + wildcard_map[uid] = '*' + + # create the requirement, then expand wildcards + # + req = Requirement(request_, invalid_bound_error=False) + + def expand_version(version): + rank = len(version) + wildcard_found = False + + while version and str(version[-1]) in wildcard_map: + token = wildcard_map[str(version[-1])] + version = version.trim(len(version) - 1) + + if token == "**": + if wildcard_found: # catches bad syntax '**.*' + return None + else: + wildcard_found = True + rank = 0 + break + + wildcard_found = True + + if not wildcard_found: + return None + + range_ = VersionRange(str(version)) + package = get_latest_package(name=req.name, range_=range_, paths=paths) + + if package is None: + return version + + if rank: + return package.version.trim(rank) + else: + return package.version + + def visit_version(version): + # requirements like 'foo-1' are actually represented internally as + # 'foo-1+<1_' - '1_' is the next possible version after '1'. So we have + # to detect this case and remap the uid-ified wildcard back here too. + # + for v, expanded_v in expanded_versions.iteritems(): + if version == v.next(): + return expanded_v.next() + + version_ = expand_version(version) + if version_ is None: + return None + + expanded_versions[version] = version_ + return version_ + + if req.range_ is not None: + req.range_.visit_versions(visit_version) + + result = str(req) + + # do some cleanup so that long uids aren't left in invalid wildcarded strings + for uid, token in wildcard_map.iteritems(): + result = result.replace(uid, token) + + # cast back to a Requirement again, then back to a string. 
This will catch + # bad verison ranges, but will also put OR'd version ranges into the correct + # order + expanded_req = Requirement(result) + + return str(expanded_req) def expand_requires(*requests): diff --git a/src/rez/rex_bindings.py b/src/rez/rex_bindings.py index 90a053b77..339cbf943 100644 --- a/src/rez/rex_bindings.py +++ b/src/rez/rex_bindings.py @@ -103,14 +103,22 @@ def __iter__(self): class VariantBinding(Binding): """Binds a packages.Variant object.""" def __init__(self, variant): - doc = dict( - name=variant.name, - version=VersionBinding(variant.version), - base=variant.base, - root=variant.root) + doc = dict(version=VersionBinding(variant.version)) super(VariantBinding, self).__init__(doc) self.__variant = variant + # hacky, but we'll be deprecating all these bindings.. + def __getattr__(self, attr): + try: + return super(VariantBinding, self).__getattr__(attr) + except: + missing = object() + value = getattr(self.__variant, attr, missing) + if value is missing: + raise + + return value + def _attr_error(self, attr): raise AttributeError("package %s has no attribute '%s'" % (str(self), attr)) diff --git a/src/rez/serialise.py b/src/rez/serialise.py index a885b0787..87eee73bd 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -170,40 +170,84 @@ def load_py(stream, filepath=None): return result +class EarlyThis(object): + """The 'this' object for @early bound functions.""" + def __init__(self, data): + self._data = data + + def __getattr__(self, attr): + missing = object() + value = self._data.get(attr, missing) + if value is missing: + raise AttributeError("No such package attribute '%s'" % attr) + + if isfunction(value) and (hasattr(value, "_early") or hasattr(value, "_late")): + raise ValueError( + "An early binding function cannot refer to another early or " + "late binding function: '%s'" % attr) + + return value + + def process_python_objects(data, filepath=None): + """Replace certain values in the given package data dict. 
- _remove = object() + Does things like: + * evaluates @early decorated functions, and replaces with return value; + * converts functions into `SourceCode` instances so they can be serialized + out to installed packages, and evaluated later; + * strips some values (modules, __-leading variables) that are never to be + part of installed packages. + Returns: + dict: Updated dict. + """ def _process(value): if isinstance(value, dict): for k, v in value.items(): - new_value = _process(v) - - if new_value is _remove: - del value[k] - else: - value[k] = new_value + value[k] = _process(v) return value elif isfunction(value): - if hasattr(value, "_early"): + func = value + + if hasattr(func, "_early"): # run the function now, and replace with return value - with add_sys_paths(config.package_definition_build_python_paths): - func = value + # + + # make a copy of the func with its own globals, and add 'this' + import types + fn = types.FunctionType(func.func_code, + func.func_globals.copy(), + name=func.func_name, + argdefs=func.func_defaults, + closure=func.func_closure) + this = EarlyThis(data) + fn.func_globals.update({"this": this}) + + with add_sys_paths(config.package_definition_build_python_paths): + # this 'data' arg support isn't needed anymore, but I'm + # supporting it til I know nobody is using it... 
+ # spec = getargspec(func) args = spec.args or [] if len(args) not in (0, 1): raise ResourceError("@early decorated function must " "take zero or one args only") if args: - value_ = func(data) + value_ = fn(data) else: - value_ = func() + value_ = fn() # process again in case this is a function returning a function return _process(value_) - else: + + elif hasattr(func, "_late"): + return SourceCode(func=func, filepath=filepath, + eval_as_function=True) + + elif func.__name__ in package_rex_keys: # if a rex function, the code has to be eval'd NOT as a function, # otherwise the globals dict doesn't get updated with any vars # defined in the code, and that means rex code like this: @@ -214,20 +258,37 @@ def _process(value): # ..won't work. It was never intentional that the above work, but # it does, so now we have to keep it so. # - as_function = (value.__name__ not in package_rex_keys) - - return SourceCode(func=value, filepath=filepath, - eval_as_function=as_function) - elif ismodule(value): - # modules cannot be installed as package attributes. They are present - # in developer packages sometimes though - it's fine for a package - # attribute to use an imported module at build time. - # - return _remove + return SourceCode(func=func, filepath=filepath, + eval_as_function=False) + + else: + # a normal function. Leave unchanged, it will be stripped after + return func else: return value - return _process(data) + def _trim(value): + if isinstance(value, dict): + for k, v in value.items(): + if isfunction(v): + if v.__name__ == "preprocess": + # preprocess is a special case. It has to stay intact + # until the `DeveloperPackage` has a chance to apply it; + # after which it gets removed from the package attributes. 
+ # + pass + else: + del value[k] + elif ismodule(v) or k.startswith("__"): + del value[k] + else: + value[k] = _trim(v) + + return value + + data = _process(data) + data = _trim(data) + return data def load_yaml(stream, **kwargs): diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index bc1e60f91..37978138b 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -5,11 +5,13 @@ create_package, get_developer_package from rez.package_resources_ import package_release_keys from rez.package_repository import create_memory_package_repository +from rez.package_py_utils import expand_requirement from rez.tests.util import TestBase, TempdirMixin from rez.utils.formatting import PackageRequest from rez.utils.sourcecode import SourceCode import rez.vendor.unittest2 as unittest from rez.vendor.version.version import Version +from rez.vendor.version.util import VersionError import os.path import os @@ -277,6 +279,44 @@ def _data(obj): data_ = _data(installed_package) self.assertDictEqual(data, data_) + def test_8(self): + """test expand_requirement function.""" + tests = ( + ("pyfoo", "pyfoo"), + ("pyfoo-3", "pyfoo-3"), + ("pyfoo-3.0", "pyfoo-3.0"), + ("pyfoo-*", "pyfoo-3"), + ("pyfoo-**", "pyfoo-3.1.0"), + ("pysplit==**", "pysplit==7"), + ("python-*+<**", "python-2+<2.7.0"), + ("python-2.6.*+<**", "python-2.6.8+<2.7.0"), + ("python-2.5|**", "python-2.5|2.7.0"), + ("notexist-1.2.3", "notexist-1.2.3"), + ("pysplit-6.*", "pysplit-6"), + ("pyfoo-3.0.0.**", "pyfoo-3.0.0"), + ("python-55", "python-55"), + + # some trickier cases, VersionRange construction rules still apply + ("python-**|2.5", "python-2.5|2.7.0"), + ("python-2.*|**", "python-2.7") + ) + + bad_tests = ( + "python-*.**", + "python-1.*.**", + "python-1.*.1", + "python-1.v*", + "python-1.**.*", + "python-1.**.1" + ) + + for req, expanded_req in tests: + result = expand_requirement(req) + self.assertEqual(result, expanded_req) + + for req in bad_tests: + 
self.assertRaises(VersionError, expand_requirement, req) + class TestMemoryPackages(TestBase): def test_1_memory_variant_parent(self): diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 02f470b2f..8d79de4c5 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.9.1" +_rez_version = "2.10.0" # Copyright 2013-2016 Allan Johns. diff --git a/src/rez/vendor/version/requirement.py b/src/rez/vendor/version/requirement.py index ae5bb638b..d60e7e2c1 100644 --- a/src/rez/vendor/version/requirement.py +++ b/src/rez/vendor/version/requirement.py @@ -109,7 +109,7 @@ class Requirement(_Common): """ sep_regex = re.compile(r'[-@#=<>]') - def __init__(self, s): + def __init__(self, s, invalid_bound_error=True): self.name_ = None self.range_ = None self.negate_ = False @@ -136,7 +136,8 @@ def __init__(self, s): self.sep_ = req_str[0] req_str = req_str[1:] - self.range_ = VersionRange(req_str) + self.range_ = VersionRange( + req_str, invalid_bound_error=invalid_bound_error) if self.negate_: self.range_ = ~self.range_ elif self.negate_: diff --git a/src/rez/vendor/version/test.py b/src/rez/vendor/version/test.py index e3aef07d7..f57154cd0 100644 --- a/src/rez/vendor/version/test.py +++ b/src/rez/vendor/version/test.py @@ -77,6 +77,9 @@ def _create_random_version(self): for i in range(random.randint(0, 6))) return Version(ver_str, make_token=self.make_token) + def test_misc(self): + self.assertEqual(Version("1.2.12").as_tuple(), ("1", "2", "12")) + def test_token_strict_weak_ordering(self): # test equal tokens tok = self._create_random_token() diff --git a/src/rez/vendor/version/version.py b/src/rez/vendor/version/version.py index 146836a1a..6ac8c94fa 100644 --- a/src/rez/vendor/version/version.py +++ b/src/rez/vendor/version/version.py @@ -298,6 +298,16 @@ def patch(self): """Semantic versioning patch version.""" return self[2] + def 
as_tuple(self): + """Convert to a tuple of strings. + + Example: + + >>> print Version("1.2.12").as_tuple() + ('1', '2', '12') + """ + return tuple(map(str, self.tokens)) + def __len__(self): return len(self.tokens or []) @@ -408,12 +418,14 @@ def contains_version(self, version): class _Bound(_Comparable): any = None - def __init__(self, lower=None, upper=None): + def __init__(self, lower=None, upper=None, invalid_bound_error=True): self.lower = lower or _LowerBound.min self.upper = upper or _UpperBound.inf - if (self.lower.version > self.upper.version) \ - or ((self.lower.version == self.upper.version) - and not (self.lower.inclusive and self.upper.inclusive)): + + if (invalid_bound_error and + (self.lower.version > self.upper.version + or ((self.lower.version == self.upper.version) + and not (self.lower.inclusive and self.upper.inclusive)))): raise VersionError("Invalid bound") def __str__(self): @@ -531,11 +543,12 @@ class _VersionRangeParser(object): regex = re.compile(version_range_regex, re_flags) - def __init__(self, input_string, make_token): + def __init__(self, input_string, make_token, invalid_bound_error=True): self.make_token = make_token self._groups = {} self._input_string = input_string self.bounds = [] + self.invalid_bound_error = invalid_bound_error for part in input_string.split("|"): if part == '': @@ -546,8 +559,7 @@ def __init__(self, input_string, make_token): match = re.search(self.regex, part) if not match: - raise ParseException("Syntax error in version range '%s'" - % part) + raise ParseException("Syntax error in version range '%s'" % part) self._groups = match.groupdict() if self._groups['version']: @@ -613,7 +625,7 @@ def _act_bound(self): upper_version = self._create_version_from_token(self._groups['inclusive_upper_version']) upper_bound = _UpperBound(upper_version, True) - self.bounds.append(_Bound(lower_bound, upper_bound)) + self.bounds.append(_Bound(lower_bound, upper_bound, self.invalid_bound_error)) @action def 
_act_lower_bound(self): @@ -646,7 +658,7 @@ def _act_lower_and_upper_bound(self): exclusive = self._is_upper_bound_exclusive(self._groups['range_upper_prefix']) upper_bound = _UpperBound(version, not exclusive) - self.bounds.append(_Bound(lower_bound, upper_bound)) + self.bounds.append(_Bound(lower_bound, upper_bound, self.invalid_bound_error)) class VersionRange(_Comparable): @@ -698,7 +710,8 @@ class VersionRange(_Comparable): with a comma, eg ">=2,<=6". The comma is purely cosmetic and is dropped in the string representation. """ - def __init__(self, range_str='', make_token=AlphanumericVersionToken): + def __init__(self, range_str='', make_token=AlphanumericVersionToken, + invalid_bound_error=True): """Create a VersionRange object. Args: @@ -706,6 +719,8 @@ def __init__(self, range_str='', make_token=AlphanumericVersionToken): will be optimised, so the string representation of this instance may not match range_str. For example, "3+<6|4+<8" == "3+<8". make_token: Version token class to use. + invalid_bound_error (bool): If True, raise an exception if an + impossible range is given, such as '3+<2'. """ self._str = None self.bounds = [] @@ -713,7 +728,8 @@ def __init__(self, range_str='', make_token=AlphanumericVersionToken): return try: - parser = _VersionRangeParser(range_str, make_token) + parser = _VersionRangeParser(range_str, make_token, + invalid_bound_error=invalid_bound_error) bounds = parser.bounds except ParseException as e: raise VersionError("Syntax error in version range '%s': %s" @@ -1001,6 +1017,35 @@ def span(self): other.bounds = [bound] return other + def visit_versions(self, func): + """Visit each version in the range, and apply a function to each. + + This is for advanced usage only. + + If `func` returns a `Version`, this call will change the versions in + place. + + It is possible to change versions in a way that is nonsensical - for + example setting an upper bound to a smaller version than the lower bound. + Use at your own risk. 
+ + Args: + func (callable): Takes a `Version` instance arg, and is applied to + every version in the range. If `func` returns a `Version`, it + will replace the existing version, updating this `VersionRange` + instance in place. + """ + for bound in self.bounds: + if bound.lower is not _LowerBound.min: + result = func(bound.lower.version) + if isinstance(result, Version): + bound.lower.version = result + + if bound.upper is not _UpperBound.inf: + result = func(bound.upper.version) + if isinstance(result, Version): + bound.upper.version = result + def __contains__(self, version_or_range): if isinstance(version_or_range, Version): return self.contains_version(version_or_range) From 19a22b9c91e3f42adaeaeabaaecf61d235637a79 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 21 Mar 2017 14:11:41 +1100 Subject: [PATCH 063/124] -minor docstring fix --- src/rez/rezconfig.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/rez/rezconfig.py b/src/rez/rezconfig.py index 083a9afdb..59dd6e905 100644 --- a/src/rez/rezconfig.py +++ b/src/rez/rezconfig.py @@ -187,17 +187,17 @@ # The map supports regular expression e.g. to keep versions. # Please note that following examples are not necessarily recommendations. 
# -# platform_map = { -# "os": { -# r"Scientific Linux-(.*)": r"Scientific-\1", # Scientific Linux-x.x -> Scientific-x.x -# r"Ubuntu-14.\d": r"Ubuntu-14", # Any Ubuntu-14.x -> Ubuntu-14 -# r'CentOS Linux-(\d+)\.(\d+)(\.(\d+))?': r'CentOS-\1.\2', ' # Centos Linux-X.Y.Z -> CentOS-X.Y -# }, -# "arch": { -# "x86_64": "64bit", # Maps both x86_64 and amd64 -> 64bit -# "amd64": "64bit", -# }, -# } +# platform_map = { +# "os": { +# r"Scientific Linux-(.*)": r"Scientific-\1", # Scientific Linux-x.x -> Scientific-x.x +# r"Ubuntu-14.\d": r"Ubuntu-14", # Any Ubuntu-14.x -> Ubuntu-14 +# r'CentOS Linux-(\d+)\.(\d+)(\.(\d+))?': r'CentOS-\1.\2', ' # Centos Linux-X.Y.Z -> CentOS-X.Y +# }, +# "arch": { +# "x86_64": "64bit", # Maps both x86_64 and amd64 -> 64bit +# "amd64": "64bit", +# }, +# } platform_map = {} # If true, then when a resolve graph is generated during a failed solve, packages From 15008121cbc3f8e4872571c5170615384454dcae Mon Sep 17 00:00:00 2001 From: Thorsten Kaufmann Date: Tue, 21 Mar 2017 09:56:48 +0100 Subject: [PATCH 064/124] Add support for multiple paths in REZ_CONFIG_FILE. Paths are simply appended in the order they appear. 
--- src/rez/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/config.py b/src/rez/config.py index 99103daea..44294e17c 100644 --- a/src/rez/config.py +++ b/src/rez/config.py @@ -541,7 +541,7 @@ def _create_main_config(cls, overrides=None): filepaths.append(get_module_root_config()) filepath = os.getenv("REZ_CONFIG_FILE") if filepath: - filepaths.append(filepath) + filepaths.extend(filepath.split(os.pathsep)) filepath = os.path.expanduser("~/.rezconfig") filepaths.append(filepath) From 54959bf1abd7f6be4820fd1bb7da3ea1e2721c86 Mon Sep 17 00:00:00 2001 From: ajohns Date: Fri, 24 Mar 2017 15:05:46 +1100 Subject: [PATCH 065/124] -added new package orderers, unit test --- src/rez/package_order.py | 248 +++++++++++++++++- .../py_packages/timestamped/1.0.5/package.py | 5 + .../py_packages/timestamped/1.0.6/package.py | 5 + .../py_packages/timestamped/1.1.0/package.py | 5 + .../py_packages/timestamped/1.1.1/package.py | 5 + .../py_packages/timestamped/1.2.0/package.py | 5 + .../py_packages/timestamped/2.0.0/package.py | 5 + .../py_packages/timestamped/2.1.0/package.py | 5 + .../py_packages/timestamped/2.1.5/package.py | 5 + src/rez/tests/test_packages.py | 46 +++- src/rez/utils/_version.py | 2 +- 11 files changed, 326 insertions(+), 10 deletions(-) create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/1.0.5/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/1.0.6/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/1.1.0/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/1.1.1/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/1.2.0/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/2.0.0/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/2.1.0/package.py create mode 100644 src/rez/tests/data/packages/py_packages/timestamped/2.1.5/package.py 
diff --git a/src/rez/package_order.py b/src/rez/package_order.py index 0559d712f..3d9f0321b 100644 --- a/src/rez/package_order.py +++ b/src/rez/package_order.py @@ -12,6 +12,9 @@ def __init__(self): def reorder(self, iterable, key=None): """Put packages into some order for consumption. + You can safely assume that the packages referred to by `iterable` are + all versions of the same package family. + Note: Returning None, and an unchanged `iterable` list, are not the same thing. Returning None may cause rez to pass the package list to the @@ -28,6 +31,9 @@ def reorder(self, iterable, key=None): """ raise NotImplementedError + def to_pod(self): + raise NotImplementedError + @property def sha1(self): return sha1(repr(self)).hexdigest() @@ -36,6 +42,216 @@ def __repr__(self): return "%s(%s)" % (self.__class__.__name__, str(self)) +class NullPackageOrder(PackageOrder): + """An orderer that does not change the order - a no op. + + This orderer is useful in cases where you want to apply some default orderer + to a set of packages, but may want to explicitly NOT reorder a particular + package. You would use a `NullPackageOrder` in a `PerFamilyOrder` to do this. + """ + name = "no_order" + + def reorder(self, iterable, key=None): + return list(iterable) + + def __str__(self): + return "{}" + + def to_pod(self): + """ + Example (in yaml): + + type: no_order + """ + return {} + + @classmethod + def from_pod(cls, data): + return cls() + + +class SortedOrder(PackageOrder): + """An orderer that sorts wrt version. 
+ """ + name = "sorted" + + def __init__(self, descending): + self.descending = descending + + def reorder(self, iterable, key=None): + key = key or (lambda x: x) + return sorted(iterable, key=lambda x: key(x).version, + reverse=self.descending) + + def __str__(self): + return str(self.descending) + + def to_pod(self): + """ + Example (in yaml): + + type: sorted + descending: true + """ + return {"descending": self.descending} + + @classmethod + def from_pod(cls, data): + return cls(descending=data["descending"]) + + +class PerFamilyOrder(PackageOrder): + """An orderer that applies different orderers to different package families. + """ + name = "per_family" + + def __init__(self, order_dict, default_order=None): + """Create a reorderer. + + Args: + order_dict (dict of (str, `PackageOrder`): Orderers to apply to + each package family. + default_order (`PackageOrder`): Orderer to apply to any packages + not specified in `order_dict`. + """ + self.order_dict = order_dict.copy() + self.default_order = default_order + + def reorder(self, iterable, key=None): + try: + item = iter(iterable).next() + except: + return None + + key = key or (lambda x: x) + package = key(item) + + orderer = self.order_dict.get(package.name) + if orderer is None: + orderer = self.default_order + if orderer is None: + return None + + return orderer.reorder(iterable, key) + + def __str__(self): + items = sorted((x[0], str(x[1])) for x in self.order_dict.items()) + return str((items, str(self.default_order))) + + def to_pod(self): + """ + Example (in yaml): + + type: per_family + orderers: + - packages: ['foo', 'bah'] + type: version_split + first_version: '4.0.5' + - packages: ['python'] + type: sorted + descending: false + default_order: + type: sorted + descending: true + """ + orderers = {} + packages = {} + + # group package fams by orderer they use + for fam, orderer in self.order_dict.iteritems(): + k = id(orderer) + orderers[k] = orderer + packages.setdefault(k, set()).add(fam) + + 
orderlist = [] + for k, fams in packages.iteritems(): + orderer = orderers[k] + data = to_pod(orderer) + data["packages"] = sorted(fams) + orderlist.append(data) + + result = {"orderers": orderlist} + + if self.default_order is not None: + result["default_order"] = to_pod(self.default_order) + + return result + + @classmethod + def from_pod(cls, data): + order_dict = {} + default_order = None + + for d in data["orderers"]: + d = d.copy() + fams = d.pop("packages") + orderer = from_pod(d) + + for fam in fams: + order_dict[fam] = orderer + + d = data.get("default_order") + if d: + default_order = from_pod(d) + + return cls(order_dict, default_order) + + +class VersionSplitPackageOrder(PackageOrder): + """Orders package versions <= a given version first. + + For example, given the versions [5, 4, 3, 2, 1], an orderer initialized + with version=3 would give the order [3, 2, 1, 5, 4]. + """ + name = "version_split" + + def __init__(self, first_version): + """Create a reorderer. + + Args: + first_version (`Version`): Start with versions <= this value. + """ + self.first_version = first_version + + def reorder(self, iterable, key=None): + key = key or (lambda x: x) + + # sort by version descending + descending = sorted(iterable, key=lambda x: key(x).version, reverse=True) + + above = [] + below = [] + is_above = True + + for item in descending: + if is_above: + package = key(item) + is_above = (package.version > self.first_version) + + if is_above: + above.append(item) + else: + below.append(item) + + return below + above + + def __str__(self): + return str(self.first_version) + + def to_pod(self): + """ + Example (in yaml): + + type: version_split + first_version: "3.0.0" + """ + return dict(first_version=self.first_version) + + @classmethod + def from_pod(cls, data): + return cls(data["first_version"]) + + class TimestampPackageOrder(PackageOrder): """A timestamp order function. 
@@ -146,7 +362,17 @@ def reorder(self, iterable, key=None): after_.extend(reversed(postrank)) return before + after_ + def __str__(self): + return str((self.timestamp, self.rank)) + def to_pod(self): + """ + Example (in yaml): + + type: soft_timestamp + timestamp: 1234567 + rank: 3 + """ return dict(timestamp=self.timestamp, rank=self.rank) @@ -155,20 +381,26 @@ def from_pod(cls, data): return cls(timestamp=data["timestamp"], rank=data["rank"]) - def __str__(self): - return str(self.to_pod()) - def to_pod(orderer): - data_ = orderer.to_pod() - data = (orderer.name, data_) + data = {"type": orderer.name} + data.update(orderer.to_pod()) return data def from_pod(data): - cls_name, data_ = data - cls = _orderers[cls_name] - return cls.from_pod(data_) + if isinstance(data, dict): + cls_name = data["type"] + data = data.copy() + data.pop("type") + + cls = _orderers[cls_name] + return cls.from_pod(data) + else: + # old-style, kept for backwards compatibility + cls_name, data_ = data + cls = _orderers[cls_name] + return cls.from_pod(data_) def register_orderer(cls): diff --git a/src/rez/tests/data/packages/py_packages/timestamped/1.0.5/package.py b/src/rez/tests/data/packages/py_packages/timestamped/1.0.5/package.py new file mode 100644 index 000000000..97bc25660 --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/1.0.5/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 1000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/1.0.6/package.py b/src/rez/tests/data/packages/py_packages/timestamped/1.0.6/package.py new file mode 100644 index 000000000..6560ecb98 --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/1.0.6/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 2000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/1.1.0/package.py b/src/rez/tests/data/packages/py_packages/timestamped/1.1.0/package.py new file mode 100644 index 
000000000..7da9914f8 --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/1.1.0/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 3000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/1.1.1/package.py b/src/rez/tests/data/packages/py_packages/timestamped/1.1.1/package.py new file mode 100644 index 000000000..ca2bf99be --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/1.1.1/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 4000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/1.2.0/package.py b/src/rez/tests/data/packages/py_packages/timestamped/1.2.0/package.py new file mode 100644 index 000000000..ab995bd94 --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/1.2.0/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 5000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/2.0.0/package.py b/src/rez/tests/data/packages/py_packages/timestamped/2.0.0/package.py new file mode 100644 index 000000000..55e3c7f4a --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/2.0.0/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 6000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/2.1.0/package.py b/src/rez/tests/data/packages/py_packages/timestamped/2.1.0/package.py new file mode 100644 index 000000000..cdaa4492a --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/2.1.0/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + +version = "1.0.5" + +timestamp = 7000 diff --git a/src/rez/tests/data/packages/py_packages/timestamped/2.1.5/package.py b/src/rez/tests/data/packages/py_packages/timestamped/2.1.5/package.py new file mode 100644 index 000000000..683541b6b --- /dev/null +++ b/src/rez/tests/data/packages/py_packages/timestamped/2.1.5/package.py @@ -0,0 +1,5 @@ +name = 'timestamped' + 
+version = "1.0.5" + +timestamp = 8000 diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index 37978138b..37790e21e 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -1,5 +1,5 @@ """ -test package iteration and serialization +test package iteration, serialization etc """ from rez.packages_ import iter_package_families, iter_packages, get_package, \ create_package, get_developer_package @@ -38,6 +38,8 @@ 'single_unversioned', 'single_versioned-3.5', 'late_binding-1.0', + 'timestamped-1.0.5', 'timestamped-1.0.6', 'timestamped-1.1.0', 'timestamped-1.1.1', + 'timestamped-1.2.0', 'timestamped-2.0.0', 'timestamped-2.1.0', 'timestamped-2.1.5', 'multi-1.0', 'multi-1.1', 'multi-1.2', 'multi-2.0']) @@ -317,6 +319,48 @@ def test_8(self): for req in bad_tests: self.assertRaises(VersionError, expand_requirement, req) + def test_9(self): + """test package orderers.""" + from rez.package_order import NullPackageOrder, PerFamilyOrder, \ + VersionSplitPackageOrder, TimestampPackageOrder, SortedOrder, \ + to_pod, from_pod + + def _test(orderer, package_name, expected_order): + it = iter_packages(package_name) + descending = sorted(it, key=lambda x: x.version, reverse=True) + + pod = to_pod(orderer) + orderer2 = from_pod(pod) + + for orderer_ in (orderer, orderer2): + ordered = orderer_.reorder(descending) + result = [str(x.version) for x in ordered] + self.assertEqual(result, expected_order) + + null_orderer = NullPackageOrder() + split_orderer = VersionSplitPackageOrder(Version("2.6.0")) + timestamp_orderer = TimestampPackageOrder(timestamp=3001, rank=3) + + expected_null_result = ["7", "6", "5"] + expected_split_result = ["2.6.0", "2.5.2", "2.7.0", "2.6.8"] + expected_timestamp_result = ["1.1.1", "1.1.0", "1.0.6", "1.0.5", + "1.2.0", "2.0.0", "2.1.5", "2.1.0"] + + _test(null_orderer, "pysplit", expected_null_result) + _test(split_orderer, "python", expected_split_result) + _test(timestamp_orderer, "timestamped", 
expected_timestamp_result) + + fam_orderer = PerFamilyOrder( + order_dict=dict(pysplit=null_orderer, + python=split_orderer, + timestamped=timestamp_orderer), + default_order=SortedOrder(descending=False)) + + _test(fam_orderer, "pysplit", expected_null_result) + _test(fam_orderer, "python", expected_split_result) + _test(fam_orderer, "timestamped", expected_timestamp_result) + _test(fam_orderer, "pymum", ["1", "2", "3"]) + class TestMemoryPackages(TestBase): def test_1_memory_variant_parent(self): diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 8d79de4c5..0004d46c4 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.10.0" +_rez_version = "2.11.0" # Copyright 2013-2016 Allan Johns. From bff23427a046cd82e44adc63c843fae4c6b91793 Mon Sep 17 00:00:00 2001 From: Andrew Nicholas Date: Fri, 24 Mar 2017 09:17:56 +0000 Subject: [PATCH 066/124] - Added check for spaces in absolute path for install.py. Without this check, installation fails if a space is in the path due to limitation with setuptools. 
--- install.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/install.py b/install.py index 5ee571f1c..aa8c20f09 100644 --- a/install.py +++ b/install.py @@ -114,6 +114,11 @@ def copy_completion_scripts(dest_dir): "ie, the baked script locations may still contain symlinks") opts, args = parser.parse_args() + if " " in os.path.realpath(__file__): + err_str = "\nThe absolute path of install.py cannot contain spaces due to setuptools limitation.\n" \ + "Please move installation files to another location or rename offending folder(s).\n" + parser.error(err_str) + # determine install path if len(args) != 1: parser.error("expected DEST_DIR") From ba91e8e6a1de95f1806b29c3f3e4ceead617d0ee Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 25 Mar 2017 11:27:19 +1100 Subject: [PATCH 067/124] -added reverse_sort_key to versions --- src/rez/vendor/version/test.py | 6 ++++- src/rez/vendor/version/version.py | 42 +++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/src/rez/vendor/version/test.py b/src/rez/vendor/version/test.py index f57154cd0..63359eebf 100644 --- a/src/rez/vendor/version/test.py +++ b/src/rez/vendor/version/test.py @@ -1,5 +1,5 @@ from rez.vendor.version.version import Version, AlphanumericVersionToken, \ - VersionRange + VersionRange, reverse_sort_key, _ReversedComparable from rez.vendor.version.requirement import Requirement, RequirementList from rez.vendor.version.util import VersionError import random @@ -54,6 +54,10 @@ def _test_strict_weak_ordering(self, a, b): self.assertTrue(lt == lte) self.assertTrue(gt == gte) + if not isinstance(a, _ReversedComparable): + self._test_strict_weak_ordering(reverse_sort_key(a), + reverse_sort_key(b)) + def _test_ordered(self, items): def _test(fn, items_, op_str): for i, a in enumerate(items_): diff --git a/src/rez/vendor/version/version.py b/src/rez/vendor/version/version.py index 6ac8c94fa..ebb129a33 100644 --- a/src/rez/vendor/version/version.py +++ 
b/src/rez/vendor/version/version.py @@ -34,6 +34,21 @@ def __lt__(self, other): raise NotImplementedError +@total_ordering +class _ReversedComparable(_Common): + def __init__(self, value): + self.value = value + + def __lt__(self, other): + return not (self.value < other.value) + + def __str__(self): + return "reverse(%s)" % str(self.value) + + def __repr__(self): + return "reverse(%r)" % self.value + + class VersionToken(_Comparable): """Token within a version number. @@ -103,6 +118,9 @@ def create_random_token_string(cls): def __str__(self): return str(self.n) + def __eq__(self, other): + return (self.n == other.n) + def less_than(self, other): return (self.n < other.n) @@ -175,6 +193,9 @@ def create_random_token_string(cls): def __str__(self): return ''.join(map(str, self.subtokens)) + def __eq__(self, other): + return (self.subtokens == other.subtokens) + def less_than(self, other): return (self.subtokens < other.subtokens) @@ -210,6 +231,25 @@ def _parse(cls, s): return subtokens +def reverse_sort_key(comparable): + """Key that gives reverse sort order on versions and version ranges. + + Example: + + >>> Version("1.0") < Version("2.0") + True + >>> reverse_sort_key(Version("1.0")) < reverse_sort_key(Version("2.0")) + False + + Args: + comparable (`Version` or `VesionRange`): Object to wrap. + + Returns: + `_ReversedComparable`: Wrapper object that reverses comparisons. + """ + return _ReversedComparable(comparable) + + class Version(_Comparable): """Version object. @@ -1017,6 +1057,8 @@ def span(self): other.bounds = [bound] return other + # TODO have this return a new VersionRange instead - this currently breaks + # VersionRange immutability, and could invalidate __str__. def visit_versions(self, func): """Visit each version in the range, and apply a function to each. 
From 22af7f657c1d760ad9d13f46e004034c157de160 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 25 Mar 2017 14:58:51 +1100 Subject: [PATCH 068/124] -added requires_rez_version, format_version attributes -warning produced when newer package formats loaded that rez may fail to load --- src/rez/package_maker__.py | 13 ++++++++ src/rez/package_resources_.py | 2 ++ src/rez/utils/_version.py | 9 +++++- .../package_repository/filesystem.py | 32 +++++++++++++++++++ 4 files changed, 55 insertions(+), 1 deletion(-) diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 5724b055a..2f954f9c6 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -1,8 +1,10 @@ +from rez.utils._version import _rez_Version from rez.utils.schema import Required, schema_keys from rez.utils.filesystem import retain_cwd from rez.utils.formatting import PackageRequest from rez.utils.data_utils import AttrDictWrapper from rez.utils.logging_ import print_warning +from rez.exceptions import PackageMetadataError from rez.package_resources_ import help_schema, _commands_schema, \ _function_schema, late_bound from rez.package_repository import create_memory_package_repository @@ -32,6 +34,8 @@ package_schema = Schema({ + Optional("requires_rez_version"): And(basestring, Use(Version)), + Required("name"): basestring, Optional("base"): basestring, Optional("version"): Or(basestring, @@ -92,6 +96,15 @@ def get_package(self): package_data = self._get_data() package_data = package_schema.validate(package_data) + # check compatibility with rez version + if "requires_rez_version" in package_data: + ver = package_data.pop("requires_rez_version") + + if _rez_Version < ver: + raise PackageMetadataError( + "Failed reading package definition file: rez version >= %s " + "needed (current version is %s)" % (ver, _rez_Version)) + # create a 'memory' package repository containing just this package version_str = package_data.get("version") or "_NO_VERSION" repo_data = {self.name: 
{version_str: package_data}} diff --git a/src/rez/package_resources_.py b/src/rez/package_resources_.py index 123564027..94e050dae 100644 --- a/src/rez/package_resources_.py +++ b/src/rez/package_resources_.py @@ -26,7 +26,9 @@ # package attributes that we don't install package_build_only_keys = ( + "requires_rez_version", "build_command", + "private_build_requires", "preprocess", ) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 0004d46c4..1a1116d18 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,14 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.11.0" +_rez_version = "2.12.0" + +try: + from rez.vendor.version.version import Version + _rez_Version = Version(_rez_version) +except: + # the installer imports this file... + pass # Copyright 2013-2016 Allan Johns. diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py index 4b574589a..df23a247b 100644 --- a/src/rezplugins/package_repository/filesystem.py +++ b/src/rezplugins/package_repository/filesystem.py @@ -11,6 +11,7 @@ ConfigurationError, PackageRepositoryError from rez.utils.formatting import is_valid_package_name, PackageRequest from rez.utils.resources import cached_property +from rez.utils.logging_ import print_warning from rez.serialise import load_from_file, FileFormat from rez.config import config from rez.utils.memcached import memcached, pool_memcached_connections @@ -22,6 +23,32 @@ import os +#------------------------------------------------------------------------------ +# format version +# +# 1: +# Initial format. +# 2: +# Late binding functions added. 
+#------------------------------------------------------------------------------ +format_version = 2 + + +def check_format_version(filename, data): + format_version_ = data.get("format_version") + + if format_version_ is not None: + try: + format_version_ = int(format_version_) + except: + return + + if format_version_ > format_version: + print_warning( + "Loading from %s may fail: newer format version (%d) than current " + "format version (%d)" % (filename, format_version_, format_version)) + + #------------------------------------------------------------------------------ # utilities #------------------------------------------------------------------------------ @@ -141,6 +168,7 @@ def _load(self): "Missing package definition file: %r" % self) data = load_from_file(self.filepath, self.file_format) + check_format_version(self.filepath, data) if "timestamp" not in data: # old format support data_ = self._load_old_formats() @@ -275,6 +303,7 @@ def iter_packages(self): def _load(self): format_ = FileFormat[self.ext] data = load_from_file(self.filepath, format_) + check_format_version(self.filepath, data) return data @@ -859,6 +888,9 @@ def remove_build_keys(obj): overrides = overrides or {} overrides["timestamp"] = int(time.time()) + # add the format version + package_data["format_version"] = format_version + # apply attribute overrides for key, value in overrides.iteritems(): if package_data.get(key) is None: From b582c03ff9f0f7b54129db9832cbeb50d4966237 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 25 Mar 2017 15:20:27 +1100 Subject: [PATCH 069/124] -added package format specific format_version attrib --- src/rezplugins/package_repository/filesystem.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/rezplugins/package_repository/filesystem.py b/src/rezplugins/package_repository/filesystem.py index df23a247b..564bbfbd8 100644 --- a/src/rezplugins/package_repository/filesystem.py +++ b/src/rezplugins/package_repository/filesystem.py @@ 
-35,7 +35,7 @@ def check_format_version(filename, data): - format_version_ = data.get("format_version") + format_version_ = data.pop("format_version", None) if format_version_ is not None: try: @@ -809,9 +809,10 @@ def remove_build_keys(obj): if value is not None: release_data[key] = value - for key in ("base", "variants"): + for key in ("format_version", "base", "variants"): data_1.pop(key, None) data_2.pop(key, None) + package_changed = (data_1 != data_2) # special case - installing a no-variant pkg into a no-variant pkg From 384b47cd241d24236456818a46ab6347993c5b95 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 25 Mar 2017 15:22:52 +1100 Subject: [PATCH 070/124] -minor fix in build util code --- push.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/push.sh b/push.sh index 16d52dacc..65a4974be 100755 --- a/push.sh +++ b/push.sh @@ -23,7 +23,7 @@ if [ $? -ne 0 ]; then fi # tag version -version=$(cat src/rez/utils/_version.py | grep -w _rez_version | tr '"' ' ' | awk '{print $NF}') +version=$(cat src/rez/utils/_version.py | grep -w _rez_version | head -n1 | tr '"' ' ' | awk '{print $NF}') echo "tagging ${version}..." git tag $version if [ $? 
-ne 0 ]; then From acec3e2a231914624d98316d64a36d08724cc143 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Thu, 23 Mar 2017 10:58:58 -0700 Subject: [PATCH 071/124] allow yaml2py to take an explicit filename Handy for package-family files (i.e. maya.yaml), or anything not named package.yaml --- src/rez/cli/yaml2py.py | 17 ++++++++-------- src/rez/developer_package.py | 38 ++++++++++++++++++++++++++++++------ src/rez/packages_.py | 4 ++-- 3 files changed, 42 insertions(+), 17 deletions(-) diff --git a/src/rez/cli/yaml2py.py b/src/rez/cli/yaml2py.py index 06a0fdda0..583e0d32d 100644 --- a/src/rez/cli/yaml2py.py +++ b/src/rez/cli/yaml2py.py @@ -6,12 +6,14 @@ def setup_parser(parser, completions=False): PKG_action = parser.add_argument( "PATH", type=str, nargs='?', - help="path to search for package.yaml, cwd if not provided") + help="path to yaml to convert, or directory to search for package.yaml;" + " cwd if not provided") def command(opts, parser, extra_arg_groups=None): from rez.packages_ import get_developer_package from rez.serialise import FileFormat + from rez.exceptions import PackageMetadataError import os.path import os import sys @@ -20,17 +22,14 @@ def command(opts, parser, extra_arg_groups=None): path = os.path.expanduser(opts.PATH) else: path = os.getcwd() - if os.path.basename(path) == "package.yaml": - path = os.path.dirname(path) - filepath_yaml = os.path.join(path, "package.yaml") - if not os.path.isfile(filepath_yaml): - print >> sys.stderr, "Expected file '%s'" % filepath_yaml - sys.exit(1) + try: + package = get_developer_package(path, format=FileFormat.yaml) + except PackageMetadataError: + package = None - package = get_developer_package(path) if package is None: - print >> sys.stderr, "Couldn't load the package at %r" % cwd + print >> sys.stderr, "Couldn't load the package at %r" % path sys.exit(1) package.print_info(format_=FileFormat.py) diff --git a/src/rez/developer_package.py b/src/rez/developer_package.py index 273293327..307dbb591
100644 --- a/src/rez/developer_package.py +++ b/src/rez/developer_package.py @@ -8,6 +8,7 @@ from rez.utils.logging_ import print_info, print_error from inspect import isfunction import os.path +import stat class DeveloperPackage(Package): @@ -31,14 +32,16 @@ def root(self): return None @classmethod - def from_path(cls, path): + def from_path(cls, path, format=None): """Load a developer package. A developer package may for example be a package.yaml or package.py in a user's source directory. Args: - path: Directory containing the package definition file. + path: Directory containing the package definition file, or file + path for the package file itself + format: which FileFormat to use, or None to check both .py and .yaml Returns: `Package` object. @@ -46,11 +49,34 @@ def from_path(cls, path): name = None data = None - for name_ in config.plugins.package_repository.filesystem.package_filenames: - for format_ in (FileFormat.py, FileFormat.yaml): - filepath = os.path.join(path, "%s.%s" % (name_, format_.extension)) + if format is None: + formats = (FileFormat.py, FileFormat.yaml) + else: + formats = (format,) - if os.path.isfile(filepath): + try: + mode = os.stat(path).st_mode + except (IOError, OSError): + raise PackageMetadataError( + "Path %r did not exist, or was not accessible" % path) + is_dir = stat.S_ISDIR(mode) + + for name_ in config.plugins.package_repository.filesystem.package_filenames: + for format_ in formats: + if is_dir: + filepath = os.path.join(path, "%s.%s" % (name_, + format_.extension)) + exists = os.path.isfile(filepath) + else: + # if format was not specified, verify that it has the + # right extension before trying to load + if format is None: + if os.path.splitext(path)[1] != format_.extension: + continue + filepath = path + exists = True + + if exists: data = load_from_file(filepath, format_) break if data: diff --git a/src/rez/packages_.py b/src/rez/packages_.py index 92efed142..4e7c5c121 100644 --- a/src/rez/packages_.py +++ 
b/src/rez/packages_.py @@ -541,9 +541,9 @@ def get_package_from_string(txt, paths=None): return get_package(o.name, o.version, paths=paths) -def get_developer_package(path): +def get_developer_package(path, format=None): from rez.developer_package import DeveloperPackage - return DeveloperPackage.from_path(path) + return DeveloperPackage.from_path(path, format=format) def create_package(name, data, package_cls=None): From 1ca9f97fc0e96ca8b9256a7f833167660cad94bf Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 1 Apr 2017 15:02:12 +1100 Subject: [PATCH 072/124] -added 'stop' rex command as official way for package commands to fail -added support for pkg attrib build_command=False, aka no build step needed -added some helper code to package_py_utils --- src/rez/exceptions.py | 5 ++ src/rez/package_maker__.py | 2 +- src/rez/package_py_utils.py | 105 ++++++++++++++++++++++++++ src/rez/rex.py | 9 +++ src/rezplugins/build_system/custom.py | 7 +- 5 files changed, 126 insertions(+), 2 deletions(-) diff --git a/src/rez/exceptions.py b/src/rez/exceptions.py index 59992f4c7..9e219fdb5 100644 --- a/src/rez/exceptions.py +++ b/src/rez/exceptions.py @@ -108,6 +108,11 @@ class RexUndefinedVariableError(RexError): pass +class RexStopError(RexError): + """Special error raised when a package commands uses the 'stop' command.""" + pass + + class BuildError(RezError): """Base class for any build-related error.""" pass diff --git a/src/rez/package_maker__.py b/src/rez/package_maker__.py index 2f954f9c6..e091dcf1c 100644 --- a/src/rez/package_maker__.py +++ b/src/rez/package_maker__.py @@ -62,7 +62,7 @@ Optional('post_commands'): _commands_schema, # attributes specific to pre-built packages - Optional("build_command"): Or([basestring], basestring), + Optional("build_command"): Or([basestring], basestring, False), Optional("preprocess"): _function_schema, # arbitrary fields diff --git a/src/rez/package_py_utils.py b/src/rez/package_py_utils.py index 2bc95c88b..538f5bed5 100644 --- 
a/src/rez/package_py_utils.py +++ b/src/rez/package_py_utils.py @@ -158,3 +158,108 @@ def expand_requires(*requests): List of str: Expanded requirements. """ return [expand_requirement(x) for x in requests] + + +def exec_command(attr, cmd): + """Runs a subproc to calculate a package attribute. + """ + import subprocess + + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + + if p.returncode: + from rez.exceptions import InvalidPackageError + raise InvalidPackageError( + "Error determining package attribute '%s':\n%s" % (attr, err)) + + return out.strip(), err.strip() + + +def exec_python(attr, src, executable="python"): + """Runs a python subproc to calculate a package attribute. + + Args: + attr (str): Name of package attribute being created. + src (list of str): Python code to execute, will be converted into + semicolon-delimited single line of code. + + Returns: + str: Output of python process. + """ + import subprocess + + if isinstance(src, basestring): + src = [src] + + p = subprocess.Popen( + [executable, "-c", "; ".join(src)], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + + if p.returncode: + from rez.exceptions import InvalidPackageError + raise InvalidPackageError( + "Error determining package attribute '%s':\n%s" % (attr, err)) + + return out.strip() + + +def find_site_python(module_name, paths=None): + """Find the rez native python package that contains the given module. + + This function is used by python 'native' rez installers to find the native + rez python package that represents the python installation that this module + is installed into. + + Note: + This function is dependent on the behavior found in the python '_native' + package found in the 'rez-recipes' repository. Specifically, it expects + to find a python package with a '_site_paths' list attribute listing + the site directories associated with the python installation. 
+ + Args: + module_name (str): Target python module. + paths (list of str, optional): paths to search for packages, + defaults to `config.packages_path`. + + Returns: + `Package`: Native python package containing the named module. + """ + from rez.packages_ import iter_packages + import subprocess + import ast + import os + + py_cmd = 'import {x}; print {x}.__path__'.format(x=module_name) + + p = subprocess.Popen(["python", "-c", py_cmd], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + out, err = p.communicate() + + if p.returncode: + raise InvalidPackageError( + "Failed to find installed python module '%s':\n%s" + % (module_name, err)) + + module_paths = ast.literal_eval(out.strip()) + + def issubdir(path, parent_path): + return path.startswith(parent_path + os.sep) + + for package in iter_packages("python", paths=paths): + if not hasattr(package, "_site_paths"): + continue + + contained = True + + for module_path in module_paths: + if not any(issubdir(module_path, x) for x in package._site_paths): + contained = False + + if contained: + return package + + raise InvalidPackageError( + "Failed to find python installation containing the module '%s'. Has " + "python been installed as a rez package?" 
% module_name) diff --git a/src/rez/rex.py b/src/rez/rex.py index 899b24724..c822b58d0 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -124,6 +124,11 @@ class Error(Action): Error.register() +class Stop(Action): + name = 'stop' +Stop.register() + + class Command(Action): name = 'command' Command.register() @@ -385,6 +390,10 @@ def error(self, value): self.actions.append(Error(value)) self.interpreter.error(value) + def stop(self, msg, *nargs): + from rez.exceptions import RexStopError + raise RexStopError(msg % nargs) + def command(self, value): # Note: Value is deliberately not formatted in commands self.actions.append(Command(value)) diff --git a/src/rezplugins/build_system/custom.py b/src/rezplugins/build_system/custom.py index 160ac4af6..a0d6ec54a 100644 --- a/src/rezplugins/build_system/custom.py +++ b/src/rezplugins/build_system/custom.py @@ -37,7 +37,7 @@ def is_valid_root(cls, path): except PackageMetadataError: return False - return bool(getattr(package, "build_command", None)) + return (getattr(package, "build_command", None) != None) def __init__(self, working_dir, opts=None, package=None, write_build_scripts=False, verbose=False, build_args=[], child_build_args=[]): @@ -105,6 +105,11 @@ def build(self, context, variant, build_path, install_path, install=False, # get build command command = self.package.build_command + # False just means no build command + if command is False: + ret["success"] = True + return ret + def expand(txt): root = self.package.root install_ = "install" if install else '' From 0ed0a0be223a8258fe2c876591351e63a1d474ee Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 1 Apr 2017 15:45:41 +1100 Subject: [PATCH 073/124] -fixed infinite recursion bug when package_load_callback specified in ResolvedContext --- src/rez/resolved_context.py | 2 +- src/rezgui/dialogs/ResolveDialog.py | 2 +- src/rezgui/qt.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 
52abbe500..52a412973 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -243,7 +243,7 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, def _package_load_callback(package): if package_load_callback: - _package_load_callback(package) + package_load_callback(package) self.num_loaded_packages += 1 request = self.requested_packages(include_implicit=True) diff --git a/src/rezgui/dialogs/ResolveDialog.py b/src/rezgui/dialogs/ResolveDialog.py index 44c877917..230ccc16f 100644 --- a/src/rezgui/dialogs/ResolveDialog.py +++ b/src/rezgui/dialogs/ResolveDialog.py @@ -28,7 +28,7 @@ def __init__(self, context_model, parent=None, advanced=False): self.started = False self._finished = False - self.busy_cursor = QtGui.QCursor(QtCore.Qt.WaitCursor) + #self.busy_cursor = QtGui.QCursor(QtCore.Qt.WaitCursor) self.edit = StreamableTextEdit() self.edit.setStyleSheet("font: 9pt 'Courier'") diff --git a/src/rezgui/qt.py b/src/rezgui/qt.py index d3e00056b..ecc4fa593 100644 --- a/src/rezgui/qt.py +++ b/src/rezgui/qt.py @@ -1,6 +1,7 @@ """ Abstraction for PyQt/PySide import. """ +import os import sys from rez.config import config from rez.exceptions import RezGuiQTImportError From b99717a60cda703ae23f32646eded809f8ed9f4b Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 1 Apr 2017 16:10:35 +1100 Subject: [PATCH 074/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 1a1116d18..9aa30eade 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. 
-_rez_version = "2.12.0" +_rez_version = "2.13.0" try: from rez.vendor.version.version import Version From 8ad7202231b4001561fafcded25ffd3596d88e1d Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Wed, 5 Apr 2017 15:35:51 -0700 Subject: [PATCH 075/124] Update shotgun toolkit hook with local changes --- src/support/shotgun_toolkit/rez_app_launch.py | 25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/src/support/shotgun_toolkit/rez_app_launch.py b/src/support/shotgun_toolkit/rez_app_launch.py index c1c3f2dfc..48ce9211d 100755 --- a/src/support/shotgun_toolkit/rez_app_launch.py +++ b/src/support/shotgun_toolkit/rez_app_launch.py @@ -59,13 +59,33 @@ def execute(self, app_path, app_args, version, **kwargs): # NUKE_PATH is used by tk-nuke # HIERO_PLUGIN_PATH is used by tk-nuke (nukestudio) # KATANA_RESOURCES is used by tk-katana - config.parent_variables = ["PYTHONPATH", "HOUDINI_PATH", "NUKE_PATH", "HIERO_PLUGIN_PATH", "KATANA_RESOURCES"] + parent_variables = ["PYTHONPATH", "HOUDINI_PATH", "NUKE_PATH", + "HIERO_PLUGIN_PATH", "KATANA_RESOURCES"] rez_packages = extra["rez_packages"] context = ResolvedContext(rez_packages) use_rez = True + # Rez env callback to restore sgtk paths setup by the shotgun launcher + # and the individual engines. + def restore_sgtk_env(executor): + """ + Restore the settings from the current tank environment setup + that happened before rez was able to run. + + """ + for envvar in parent_variables: + paths = os.environ.get(envvar, '').split(';') + #TODO: Remove this when P:\code is removed from domain policy + # P:\code is normally removed by rez, but since we have to + # restore some of the env vars setup by tank, we need to + # pull out the non-tank envvars setup here, which is mostly any + # path on P:\code. 
+ paths = [p for p in paths if r'P:\code' not in p] + for path in reversed(paths): + getattr(executor.env, envvar).prepend(path) + system = sys.platform shell_type = 'bash' if system == "linux2": @@ -102,7 +122,8 @@ def execute(self, app_path, app_args, version, **kwargs): parent_environ=n_env, shell=shell_type, stdin=False, - block=False + block=False, + post_actions_callback=restore_sgtk_env, ) exit_code = proc.wait() context.print_info(verbosity=True) From 623a0f602d08b7ed523583de310802d6e3a8b126 Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Wed, 19 Apr 2017 11:58:58 -0700 Subject: [PATCH 076/124] compatibility fix for newer versions of git --- src/rezplugins/release_vcs/git.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/rezplugins/release_vcs/git.py b/src/rezplugins/release_vcs/git.py index b13360de7..cc422853b 100644 --- a/src/rezplugins/release_vcs/git.py +++ b/src/rezplugins/release_vcs/git.py @@ -78,8 +78,11 @@ def get_tracking_branch(self): "--symbolic-full-name", "@{u}")[0] return remote_uri.split('/', 1) except Exception as e: - if ("No upstream branch" not in str(e) - and "No upstream configured" not in str(e)): + # capitalization of message changed sometime between git 1.8.3 + # and 2.12 - used to be "No upstream", now "no upstream".. 
+ errmsg = str(e).lower() + if ("no upstream branch" not in errmsg + and "no upstream configured" not in errmsg): raise e return (None, None) From 486b3f740697351dcd77d375ddd1fb599d6b37c1 Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Tue, 20 Jun 2017 12:59:24 -0700 Subject: [PATCH 077/124] moved git ignore to global ignore --- .gitignore | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 3a068683b..831efbaa3 100644 --- a/.gitignore +++ b/.gitignore @@ -8,5 +8,4 @@ build/ dist/ *~ docs/_build -.DS_Store -.idea/ \ No newline at end of file +.DS_Store \ No newline at end of file From 9012bce98873c67aebdb2321bbce5bbe1b23a03a Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Tue, 20 Jun 2017 12:59:45 -0700 Subject: [PATCH 078/124] added newline --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 831efbaa3..5f9ad53aa 100644 --- a/.gitignore +++ b/.gitignore @@ -8,4 +8,4 @@ build/ dist/ *~ docs/_build -.DS_Store \ No newline at end of file +.DS_Store From b70b42358689651236b377c84e10ce4c07758345 Mon Sep 17 00:00:00 2001 From: Federico Naum Date: Tue, 4 Jul 2017 12:24:19 +1000 Subject: [PATCH 079/124] fix apply function so the environment variables are available after applying the resolved context --- src/rez/resolved_context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 52a412973..aeebf7c49 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1043,6 +1043,7 @@ def apply(self, parent_environ=None): interpreter = Python(target_environ=os.environ) executor = self._create_executor(interpreter, parent_environ) self._execute(executor) + executor.get_output() @_on_success def which(self, cmd, parent_environ=None, fallback=False): From d9e83b8e794e453eb0a5b9f04bc7fc11e871b20c Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 4 Jul 2017 15:52:40 +1000 Subject: [PATCH 080/124]
-minor update to apply() fix -added test for ResolvedContext.apply() --- src/rez/bind/hello_world.py | 1 + src/rez/resolved_context.py | 10 ++++++---- src/rez/rex.py | 10 ++++++++-- src/rez/tests/test_context.py | 6 ++++++ src/rez/utils/_version.py | 2 +- 5 files changed, 22 insertions(+), 7 deletions(-) diff --git a/src/rez/bind/hello_world.py b/src/rez/bind/hello_world.py index 36e9a68c5..f080a37db 100644 --- a/src/rez/bind/hello_world.py +++ b/src/rez/bind/hello_world.py @@ -16,6 +16,7 @@ def commands(): env.PATH.append('{this.root}/bin') + env.OH_HAI_WORLD = "hello" def hello_world_source(): diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index aeebf7c49..71b16d213 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1035,15 +1035,17 @@ def get_actions(self, parent_environ=None): def apply(self, parent_environ=None): """Apply the context to the current python session. - Note that this updates os.environ and possibly sys.path. + Note that this updates os.environ and possibly sys.path, if + `parent_environ` is not provided. - @param environ Environment to interpret the context within, defaults to - os.environ if None. + Args: + parent_environ: Environment to interpret the context within, + defaults to os.environ if None. """ interpreter = Python(target_environ=os.environ) executor = self._create_executor(interpreter, parent_environ) self._execute(executor) - executor.get_output() + interpreter.apply_environ() @_on_success def which(self, cmd, parent_environ=None, fallback=False): diff --git a/src/rez/rex.py b/src/rez/rex.py index c822b58d0..d50ba1291 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -541,7 +541,8 @@ def __init__(self, target_environ=None, passive=False): target_environ: dict If target_environ is None or os.environ, interpreted actions are applied to the current python interpreter. Otherwise, changes are - only applied to target_environ. + only applied to target_environ. 
In either case you must call + `apply_environ` to flush all changes to the target environ dict. passive: bool If True, commands that do not update the environment (such as info) @@ -559,12 +560,17 @@ def __init__(self, target_environ=None, passive=False): def set_manager(self, manager): self.manager = manager - def get_output(self, style=OutputStyle.file): + def apply_environ(self): + """Apply changes to target environ. + """ if self.manager is None: raise RezSystemError("You must call 'set_manager' on a Python rex " "interpreter before using it.") self.target_environ.update(self.manager.environ) + + def get_output(self, style=OutputStyle.file): + self.apply_environ() return self.manager.environ def setenv(self, key, value): diff --git a/src/rez/tests/test_context.py b/src/rez/tests/test_context.py index 0b4202709..2fa7df9c7 100644 --- a/src/rez/tests/test_context.py +++ b/src/rez/tests/test_context.py @@ -39,6 +39,12 @@ def test_create_context(self): r = ResolvedContext(["hello_world"]) r.print_info() + def test_apply(self): + """Test apply() function.""" + r = ResolvedContext(["hello_world"]) + r.apply() + self.assertEqual(os.environ.get("OH_HAI_WORLD"), "hello") + def test_execute_command(self): """Test command execution in context.""" if platform_.name == "windows": diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 9aa30eade..90543bdae 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. 
-_rez_version = "2.13.0" +_rez_version = "2.13.1" try: from rez.vendor.version.version import Version From 1da48879e5705c4e0d89f3aece9e510bc5a5b26d Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 4 Jul 2017 16:23:52 +1000 Subject: [PATCH 081/124] -fixed bug where parent_environ was not being set into the target env dict in ResolvedContext.execute_command --- src/rez/resolved_context.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 71b16d213..237103be7 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -1093,7 +1093,13 @@ def execute_command(self, args, parent_environ=None, **subprocess_kwargs): Note: This does not alter the current python session. """ - interpreter = Python(target_environ={}) + if parent_environ in (None, os.environ): + target_environ = {} + else: + target_environ = parent_environ.copy() + + interpreter = Python(target_environ=target_environ) + executor = self._create_executor(interpreter, parent_environ) self._execute(executor) return interpreter.subprocess(args, **subprocess_kwargs) From f70013d1327f814081a351ef2b5b931e0e9c112f Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 4 Jul 2017 16:37:01 +1000 Subject: [PATCH 082/124] -added test for environ in execute_command --- src/rez/tests/test_context.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/rez/tests/test_context.py b/src/rez/tests/test_context.py index 2fa7df9c7..3f10f1e22 100644 --- a/src/rez/tests/test_context.py +++ b/src/rez/tests/test_context.py @@ -58,6 +58,25 @@ def test_execute_command(self): stdout = stdout.strip() self.assertEqual(stdout, "Hello Rez World!") + def test_execute_command_environ(self): + """Test that execute_command properly sets environ dict.""" + parent_environ = {"BIGLY": "covfefe"} + r = ResolvedContext(["hello_world"]) + + pycode = ("import os; " + "print os.getenv(\"BIGLY\"); " + "print 
os.getenv(\"OH_HAI_WORLD\")") + + args = ["python", "-c", pycode] + + p = r.execute_command(args, parent_environ=parent_environ, + stdout=subprocess.PIPE) + stdout, _ = p.communicate() + stdout = stdout.strip() + parts = [x.strip() for x in stdout.split('\n')] + + self.assertEqual(parts, ["covfefe", "hello"]) + def test_serialize(self): """Test save/load of context.""" # save From 8a89487d42f9f0cd3058d8f130bb6dbd3011aa71 Mon Sep 17 00:00:00 2001 From: Blazej Floch Date: Wed, 12 Jul 2017 14:23:57 -0400 Subject: [PATCH 083/124] Implements #442 - Adds build_requires and private_build_requires options to rez-depends --- src/rez/cli/depends.py | 10 +++++++++- src/rez/package_search.py | 14 ++++++++++++-- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/src/rez/cli/depends.py b/src/rez/cli/depends.py index 6767580e2..887d7af6f 100644 --- a/src/rez/cli/depends.py +++ b/src/rez/cli/depends.py @@ -10,6 +10,12 @@ def setup_parser(parser, completions=False): parser.add_argument( "--paths", type=str, default=None, help="set package search path") + parser.add_argument( + "-b", "--build-requires", action="store_true", default=False, + help="Include build_requires") + parser.add_argument( + "-p", "--private-build-requires", action="store_true", default=False, + help="Include private_build_requires") parser.add_argument( "-g", "--graph", action="store_true", help="display the dependency tree as an image") @@ -51,7 +57,9 @@ def command(opts, parser, extra_arg_groups=None): pkgs_list, g = get_reverse_dependency_tree( package_name=opts.PKG, depth=opts.depth, - paths=pkg_paths) + paths=pkg_paths, + build_requires=opts.build_requires, + private_build_requires=opts.private_build_requires) if opts.graph or opts.print_graph or opts.write_graph: gstr = write_dot(g) diff --git a/src/rez/package_search.py b/src/rez/package_search.py index eb5c827f9..373c93a6a 100644 --- a/src/rez/package_search.py +++ b/src/rez/package_search.py @@ -15,7 +15,9 @@ from rez.utils.formatting 
import PackageRequest -def get_reverse_dependency_tree(package_name, depth=None, paths=None): +def get_reverse_dependency_tree(package_name, depth=None, paths=None, + build_requires=False, + private_build_requires=False): """Find packages that depend on the given package. This is a reverse dependency lookup. A tree is constructed, showing what @@ -60,7 +62,15 @@ def get_reverse_dependency_tree(package_name, depth=None, paths=None): continue pkg = max(packages, key=lambda x: x.version) - requires = set(pkg.requires or []) + requires = [] + if not build_requires and not private_build_requires: + requires = pkg.requires or [] + else: + for variant in pkg.iter_variants(): + requires += variant.get_requires(build_requires, private_build_requires) + + requires = set(requires) + for req_list in (pkg.variants or []): requires.update(req_list) From b33c924dc4cde3674c5f706e59573b6ea9f4f42c Mon Sep 17 00:00:00 2001 From: Paul Molodowitch Date: Mon, 31 Jul 2017 18:39:44 -0700 Subject: [PATCH 084/124] solver fix for variant splitting --- src/rez/solver.py | 2 +- .../solver/packages/test_variant_split_end/1.0/package.py | 5 +++++ .../solver/packages/test_variant_split_end/2.0/package.py | 4 ++++ .../solver/packages/test_variant_split_end/3.0/package.py | 4 ++++ .../solver/packages/test_variant_split_end/4.0/package.py | 4 ++++ .../solver/packages/test_variant_split_mid1/1.0/package.py | 4 ++++ .../solver/packages/test_variant_split_mid1/2.0/package.py | 4 ++++ .../solver/packages/test_variant_split_mid2/1.0/package.py | 4 ++++ .../solver/packages/test_variant_split_mid2/2.0/package.py | 4 ++++ .../packages/test_variant_split_start/1.0/package.py | 4 ++++ .../packages/test_variant_split_start/2.0/package.py | 4 ++++ src/rez/tests/test_completion.py | 4 +++- src/rez/tests/test_packages.py | 5 +++++ src/rez/tests/test_solver.py | 7 +++++++ 14 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_end/1.0/package.py 
create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_end/2.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_end/3.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_end/4.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_mid1/1.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_mid1/2.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_mid2/1.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_mid2/2.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_start/1.0/package.py create mode 100644 src/rez/tests/data/solver/packages/test_variant_split_start/2.0/package.py diff --git a/src/rez/solver.py b/src/rez/solver.py index ca15000c2..d25cc30f0 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -833,7 +833,7 @@ def _split(i_entry, n_variants, common_fams=None): if result: entry, next_entry = result entries = self.entries[:i_entry] + [entry] - next_entries = [next_entry] + self.entries[i_entry:] + next_entries = [next_entry] + self.entries[i_entry + 1:] else: entries = self.entries[:i_entry + 1] next_entries = self.entries[i_entry + 1:] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_end/1.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_end/1.0/package.py new file mode 100644 index 000000000..74214eacb --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_end/1.0/package.py @@ -0,0 +1,5 @@ +name = "test_variant_split_end" +version = "1.0" + +variants = [["!test_variant_split_mid2"], ["!test_variant_split_start-2"]] + diff --git a/src/rez/tests/data/solver/packages/test_variant_split_end/2.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_end/2.0/package.py new file mode 100644 index 000000000..a4f3435e3 --- /dev/null +++ 
b/src/rez/tests/data/solver/packages/test_variant_split_end/2.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_end" +version = "2.0" + +variants = [["!test_variant_split_start"], ["!test_variant_split_mid1"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_end/3.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_end/3.0/package.py new file mode 100644 index 000000000..0ebf527a2 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_end/3.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_end" +version = "3.0" + +variants = [["!test_variant_split_start"], ["!test_variant_split_mid2"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_end/4.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_end/4.0/package.py new file mode 100644 index 000000000..535b76f52 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_end/4.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_end" +version = "4.0" + +variants = [["!test_variant_split_start"], ["!test_variant_split_mid1"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_mid1/1.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_mid1/1.0/package.py new file mode 100644 index 000000000..95f5d1ec0 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_mid1/1.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_mid1" +version = "1.0" + +variants = [["test_variant_split_end-2"], ["test_variant_split_end-1"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_mid1/2.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_mid1/2.0/package.py new file mode 100644 index 000000000..e761f525a --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_mid1/2.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_mid1" +version = "2.0" + +variants = [["test_variant_split_end-2"], ["test_variant_split_end-4"]] 
diff --git a/src/rez/tests/data/solver/packages/test_variant_split_mid2/1.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_mid2/1.0/package.py new file mode 100644 index 000000000..37f427da1 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_mid2/1.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_mid2" +version = "1.0" + +variants = [["test_variant_split_end-3"], ["test_variant_split_end-1"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_mid2/2.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_mid2/2.0/package.py new file mode 100644 index 000000000..5eba9bab0 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_mid2/2.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_mid2" +version = "2.0" + +variants = [["test_variant_split_end-1"], ["test_variant_split_end-3"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_start/1.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_start/1.0/package.py new file mode 100644 index 000000000..4bd4f8d65 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_start/1.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_start" +version = "1.0" + +variants = [["test_variant_split_mid1-1"], ["test_variant_split_mid2-2"]] diff --git a/src/rez/tests/data/solver/packages/test_variant_split_start/2.0/package.py b/src/rez/tests/data/solver/packages/test_variant_split_start/2.0/package.py new file mode 100644 index 000000000..e29d77db5 --- /dev/null +++ b/src/rez/tests/data/solver/packages/test_variant_split_start/2.0/package.py @@ -0,0 +1,4 @@ +name = "test_variant_split_start" +version = "2.0" + +variants = [["test_variant_split_mid1-2"], ["test_variant_split_mid2-2"]] diff --git a/src/rez/tests/test_completion.py b/src/rez/tests/test_completion.py index b23ae1d88..00f74bc9f 100644 --- a/src/rez/tests/test_completion.py +++ b/src/rez/tests/test_completion.py @@ 
-48,7 +48,9 @@ def _eq(prefix, expected_completions): _eq("zzz", []) _eq("", ["bahish", "nada", "nopy", "pybah", "pydad", "pyfoo", "pymum", - "pyodd", "pyson", "pysplit", "python", "pyvariants"]) + "pyodd", "pyson", "pysplit", "python", "pyvariants", + "test_variant_split_start", "test_variant_split_mid1", + "test_variant_split_mid2", "test_variant_split_end"]) _eq("py", ["pybah", "pydad", "pyfoo", "pymum", "pyodd", "pyson", "pysplit", "python", "pyvariants"]) _eq("pys", ["pyson", "pysplit"]) diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index 37790e21e..d1c96c3a8 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -30,6 +30,11 @@ 'pysplit-5', 'pysplit-6', 'pysplit-7', 'python-2.5.2', 'python-2.6.0', 'python-2.6.8', 'python-2.7.0', 'pyvariants-2', + 'test_variant_split_start-1.0', 'test_variant_split_start-2.0', + 'test_variant_split_mid1-1.0', 'test_variant_split_mid1-2.0', + 'test_variant_split_mid2-1.0', 'test_variant_split_mid2-2.0', + 'test_variant_split_end-1.0', 'test_variant_split_end-2.0', + 'test_variant_split_end-3.0', 'test_variant_split_end-4.0', # packages from data/packages/py_packages and .../yaml_packages 'unversioned', 'unversioned_py', diff --git a/src/rez/tests/test_solver.py b/src/rez/tests/test_solver.py index ce2d394c5..7a06c13be 100644 --- a/src/rez/tests/test_solver.py +++ b/src/rez/tests/test_solver.py @@ -212,6 +212,13 @@ def test_10_intersection_priority_mode(self): self._solve(["pyvariants", "python", "nada"], ["python-2.6.8[]", "nada[]", "pyvariants-2[1]"]) + def test_11_variant_splitting(self): + self._solve(["test_variant_split_start"], + ["test_variant_split_end-1.0[1]", + "test_variant_split_mid2-2.0[0]", + "test_variant_split_start-1.0[1]"]) + + if __name__ == '__main__': unittest.main() From d4e5ed06f740c7cd416ed54f250015a52993d6dd Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 30 Sep 2017 09:59:19 +1000 Subject: [PATCH 085/124] wip doc updates --- src/rez/SOLVER.md 
| 125 ++++++++++++++++++++++++++++++++++++++++++++++ src/rez/solver.py | 13 +++++ 2 files changed, 138 insertions(+) create mode 100644 src/rez/SOLVER.md diff --git a/src/rez/SOLVER.md b/src/rez/SOLVER.md new file mode 100644 index 000000000..cab989582 --- /dev/null +++ b/src/rez/SOLVER.md @@ -0,0 +1,125 @@ +# Description Of Solver Algorithm + +## Glossary + +* A **phase** is a current state of the solve. It contains a list of **scopes**. + +A 'scope' is a package request. If the request isn't a conflict, then a scope +also contains the actual list of variants that match the request. + +The solve loop performs 5 different types of operations: + +* EXTRACTION. This happens when a common dependency is found in all the variants + in a scope. For example if every version of pkg 'foo' depends on some version + of python, the 'extracted' dependency might be "python-2.6|2.7". An extraction + then results in either an INTERSECT or an ADD. + +* INTERSECT: This happens when an extracted dependency overlaps with an existing + scope. For example "python-2" might be a current scope. Pkg foo's common dependency + python-2.6|2.7 would be 'intersected' with this scope. This might result in a + conflict, which would cause the whole phase to fail (and possibly the whole solve). + Or, as in this case, it narrows an existing scope to 'python-2.6|2.7'. + +* ADD: This happens when an extraction is a new pkg request. A new scope is + created and added to the current list of scopes. + +* REDUCE: This is when a scope iterates over all of its variants and removes those + that conflict with another scope. If this removes all the variants in the scope, + the phase has failed - this is called a "total reduction". This type of failure + is not common - usually it's a conflicting INTERSECT that causes a failure. 
+ +* SPLIT: Once a phase has been extracted/intersected/added/reduced as much as + possible (this is called 'exhausted'), we are left with either a solution (each + scope contains only a single variant), or an unsolved phase. This is when the + algorithm needs to recurse (although it doesn't actually recurse, it uses a stack + instead). A SPLIT occurs at this point. The first scope with more than one + variant is found. This scope is split in two (let us say ScopeA and ScopeB), + where ScopeA has at least one common dependency (worst case scenario, ScopeA + contains a single variant). This is done because it guarantees a later extraction, + which hopefully gets us closer to a solution. Now, two phases are created (let us + say PhaseA and PhaseB) - identical to the current phase, except that PhaseA has + ScopeA instead of the original, and PhaseB has ScopeB instead of the original. + Now, we attempt to solve PhaseA, and if that fails, we attempt to solve PhaseB. + +Following the process above, we maintain a 'phase stack'. We run a loop, and in +each loop, we attempt to solve the phase at the top of the stack. If the phase +becomes exhaused, then it is split, and replaced with 2 phases (so the stack +grows by 1). If the phase is solved, then we have the solution, and the other +phases are discarded. If the phase fails to solve, then it is removed from the +stack - if the stack is then empty, then there is no solution. 
+ +The pseudocode for a solve looks like this:: + + def solve(requests): + phase = create_initial_phase(requests) + phase_stack = stack() + phase_stack.push(phase) + + while not solved(): + phase = phase_stack.pop() + if phase.failed: + phase = phase_stack.pop() # discard previous failed phase + + if phase.exhausted: + phase, next_phase = phase.split() + phase_stack.push(next_phase) + + new_phase = solve_phase(phase) + if new_phase.failed: + phase_stack.push(new_phase) # we keep last fail on the stack + elif new_phase.solved: + # some housekeeping here, like checking for cycles + final_phase = finalise_phase(new_phase) + phase_stack.push(final_phase) + else: + phase_stack.push(new_phase) # phase is exhausted + + def solve_phase(phase): + while True: + while True: + foreach phase.scope as x: + extractions |= collect_extractions(x) + + if extractions_present: + foreach phase.scope as x: + intersect(x, extractions) + if failed(x): + set_fail() + return + elif intersected(x): + reductions |= add_reductions_involving(x) + + foreach new_request in extractions: + scope = new_scope(new_request) + reductions |= add_reductions_involving(scope) + phase.add(scope) + else: + break + + if no intersections and no adds: + break + + foreach scope_a, scope_b in reductions: + scope_b.reduce_by(scope_a) + if totally_reduced(scope_b): + set_fail() + return + +There are 2 notable points missing from the pseudocode, related to optimisations: + +* Scopes keep a set of package families so that they can quickly skip unnecessary + reductions. For example, all 'foo' pkgs may depend only on the set (python, bah), + so when reduced against 'maya', this becomes basically a no-op. + +* Objects in the solver (phases, scopes etc) are immutable. Whenever a change + occurs - such as a scope being narrowed as a result of an intersect - what + actually happens is that a new object is created, often based on a shallow copy + of the previous object. 
This is basically implementing copy-on-demand - lots of + scopes are shared between phases in the stack, if objects were not immutable + then creating a new phase would involve a deep copy of the entire state of the + solver. + +Notes on how to interpret verbose debugging output: + +This output indicates that a phase is starting. The number indicates the number +of phases that have been solved so far, regardle diff --git a/src/rez/solver.py b/src/rez/solver.py index ca15000c2..c97b8deb3 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -45,6 +45,13 @@ ScopeA instead of the original, and PhaseB has ScopeB instead of the original. Now, we attempt to solve PhaseA, and if that fails, we attempt to solve PhaseB. +Following the process above, we maintain a 'phase stack'. We run a loop, and in +each loop, we attempt to solve the phase at the top of the stack. If the phase +becomes exhaused, then it is split, and replaced with 2 phases (so the stack +grows by 1). If the phase is solved, then we have the solution, and the other +phases are discarded. If the phase fails to solve, then it is removed from the +stack - if the stack is then empty, then there is no solution. + The pseudocode for a solve looks like this:: def solve(requests): @@ -115,6 +122,12 @@ def solve_phase(phase): scopes are shared between phases in the stack, if objects were not immutable then creating a new phase would involve a deep copy of the entire state of the solver. + +Notes on how to interpret verbose debugging output: + +This output indicates that a phase is starting. 
The number indicates the number +of phases that have been solved so far, regardle + """ from rez.config import config from rez.packages_ import iter_packages From b3575e91169b2faec1f59cb1540b1790979a6f5a Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 30 Sep 2017 10:01:16 +1000 Subject: [PATCH 086/124] docs wip --- src/rez/SOLVER.md | 5 +- src/rez/solver.py | 124 +--------------------------------------------- 2 files changed, 3 insertions(+), 126 deletions(-) diff --git a/src/rez/SOLVER.md b/src/rez/SOLVER.md index cab989582..7ea685ca9 100644 --- a/src/rez/SOLVER.md +++ b/src/rez/SOLVER.md @@ -3,9 +3,8 @@ ## Glossary * A **phase** is a current state of the solve. It contains a list of **scopes**. - -A 'scope' is a package request. If the request isn't a conflict, then a scope -also contains the actual list of variants that match the request. +* A **scope** is a package request. If the request isn't a conflict, then a scope + also contains the actual list of variants that match the request. The solve loop performs 5 different types of operations: diff --git a/src/rez/solver.py b/src/rez/solver.py index c97b8deb3..490415865 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -5,129 +5,7 @@ in resolve.py instead, which will use cached data where possible to provide you with a faster resolve. - -A 'phase' is a current state of the solve. It contains a list of 'scopes'. - -A 'scope' is a package request. If the request isn't a conflict, then a scope -also contains the actual list of variants that match the request. - -The solve loop performs 5 different types of operations: - -* EXTRACTION. This happens when a common dependency is found in all the variants - in a scope. For example if every version of pkg 'foo' depends on some version - of python, the 'extracted' dependency might be "python-2.6|2.7". An extraction - then results in either an INTERSECT or an ADD. - -* INTERSECT: This happens when an extracted dependency overlaps with an existing - scope. 
For example "python-2" might be a current scope. Pkg foo's common dependency - python-2.6|2.7 would be 'intersected' with this scope. This might result in a - conflict, which would cause the whole phase to fail (and possibly the whole solve). - Or, as in this case, it narrows an existing scope to 'python-2.6|2.7'. - -* ADD: This happens when an extraction is a new pkg request. A new scope is - created and added to the current list of scopes. - -* REDUCE: This is when a scope iterates over all of its variants and removes those - that conflict with another scope. If this removes all the variants in the scope, - the phase has failed - this is called a "total reduction". This type of failure - is not common - usually it's a conflicting INTERSECT that causes a failure. - -* SPLIT: Once a phase has been extracted/intersected/added/reduced as much as - possible (this is called 'exhausted'), we are left with either a solution (each - scope contains only a single variant), or an unsolved phase. This is when the - algorithm needs to recurse (although it doesn't actually recurse, it uses a stack - instead). A SPLIT occurs at this point. The first scope with more than one - variant is found. This scope is split in two (let us say ScopeA and ScopeB), - where ScopeA has at least one common dependency (worst case scenario, ScopeA - contains a single variant). This is done because it guarantees a later extraction, - which hopefully gets us closer to a solution. Now, two phases are created (let us - say PhaseA and PhaseB) - identical to the current phase, except that PhaseA has - ScopeA instead of the original, and PhaseB has ScopeB instead of the original. - Now, we attempt to solve PhaseA, and if that fails, we attempt to solve PhaseB. - -Following the process above, we maintain a 'phase stack'. We run a loop, and in -each loop, we attempt to solve the phase at the top of the stack. 
If the phase -becomes exhaused, then it is split, and replaced with 2 phases (so the stack -grows by 1). If the phase is solved, then we have the solution, and the other -phases are discarded. If the phase fails to solve, then it is removed from the -stack - if the stack is then empty, then there is no solution. - -The pseudocode for a solve looks like this:: - - def solve(requests): - phase = create_initial_phase(requests) - phase_stack = stack() - phase_stack.push(phase) - - while not solved(): - phase = phase_stack.pop() - if phase.failed: - phase = phase_stack.pop() # discard previous failed phase - - if phase.exhausted: - phase, next_phase = phase.split() - phase_stack.push(next_phase) - - new_phase = solve_phase(phase) - if new_phase.failed: - phase_stack.push(new_phase) # we keep last fail on the stack - elif new_phase.solved: - # some housekeeping here, like checking for cycles - final_phase = finalise_phase(new_phase) - phase_stack.push(final_phase) - else: - phase_stack.push(new_phase) # phase is exhausted - - def solve_phase(phase): - while True: - while True: - foreach phase.scope as x: - extractions |= collect_extractions(x) - - if extractions_present: - foreach phase.scope as x: - intersect(x, extractions) - if failed(x): - set_fail() - return - elif intersected(x): - reductions |= add_reductions_involving(x) - - foreach new_request in extractions: - scope = new_scope(new_request) - reductions |= add_reductions_involving(scope) - phase.add(scope) - else: - break - - if no intersections and no adds: - break - - foreach scope_a, scope_b in reductions: - scope_b.reduce_by(scope_a) - if totally_reduced(scope_b): - set_fail() - return - -There are 2 notable points missing from the pseudocode, related to optimisations: - -* Scopes keep a set of package families so that they can quickly skip unnecessary - reductions. For example, all 'foo' pkgs may depend only on the set (python, bah), - so when reduced against 'maya', this becomes basically a no-op. 
- -* Objects in the solver (phases, scopes etc) are immutable. Whenever a change - occurs - such as a scope being narrowed as a result of an intersect - what - actually happens is that a new object is created, often based on a shallow copy - of the previous object. This is basically implementing copy-on-demand - lots of - scopes are shared between phases in the stack, if objects were not immutable - then creating a new phase would involve a deep copy of the entire state of the - solver. - -Notes on how to interpret verbose debugging output: - -This output indicates that a phase is starting. The number indicates the number -of phases that have been solved so far, regardle - +See SOLVER.md for an in-depth description of how this module works. """ from rez.config import config from rez.packages_ import iter_packages From dc56cba77fb09b4cc45dd5441835740a5ab9b74f Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 30 Sep 2017 10:26:53 +1000 Subject: [PATCH 087/124] wip docs --- src/rez/SOLVER.md | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/src/rez/SOLVER.md b/src/rez/SOLVER.md index 7ea685ca9..df72a9499 100644 --- a/src/rez/SOLVER.md +++ b/src/rez/SOLVER.md @@ -1,6 +1,6 @@ # Description Of Solver Algorithm -## Glossary +## Overview * A **phase** is a current state of the solve. It contains a list of **scopes**. * A **scope** is a package request. If the request isn't a conflict, then a scope @@ -8,26 +8,26 @@ The solve loop performs 5 different types of operations: -* EXTRACTION. This happens when a common dependency is found in all the variants +* **EXTRACT**. This happens when a common dependency is found in all the variants in a scope. For example if every version of pkg 'foo' depends on some version of python, the 'extracted' dependency might be "python-2.6|2.7". An extraction then results in either an INTERSECT or an ADD. 
-* INTERSECT: This happens when an extracted dependency overlaps with an existing +* **INTERSECT**: This happens when an extracted dependency overlaps with an existing scope. For example "python-2" might be a current scope. Pkg foo's common dependency python-2.6|2.7 would be 'intersected' with this scope. This might result in a conflict, which would cause the whole phase to fail (and possibly the whole solve). Or, as in this case, it narrows an existing scope to 'python-2.6|2.7'. -* ADD: This happens when an extraction is a new pkg request. A new scope is +* **ADD**: This happens when an extraction is a new pkg request. A new scope is created and added to the current list of scopes. -* REDUCE: This is when a scope iterates over all of its variants and removes those +* **REDUCE**: This is when a scope iterates over all of its variants and removes those that conflict with another scope. If this removes all the variants in the scope, the phase has failed - this is called a "total reduction". This type of failure is not common - usually it's a conflicting INTERSECT that causes a failure. -* SPLIT: Once a phase has been extracted/intersected/added/reduced as much as +* **SPLIT**: Once a phase has been extracted/intersected/added/reduced as much as possible (this is called 'exhausted'), we are left with either a solution (each scope contains only a single variant), or an unsolved phase. This is when the algorithm needs to recurse (although it doesn't actually recurse, it uses a stack @@ -47,7 +47,9 @@ grows by 1). If the phase is solved, then we have the solution, and the other phases are discarded. If the phase fails to solve, then it is removed from the stack - if the stack is then empty, then there is no solution. 
-The pseudocode for a solve looks like this:: +## Pseudocode + +The pseudocode for a solve looks like this: def solve(requests): phase = create_initial_phase(requests) @@ -118,7 +120,16 @@ There are 2 notable points missing from the pseudocode, related to optimisations then creating a new phase would involve a deep copy of the entire state of the solver. -Notes on how to interpret verbose debugging output: +## Interpreting Debugging Output + +Solver debugging is enabled using the *rez-env* *-v* flag. Repeat for more +vebosity, to a max of *-vvv*. This output indicates that a phase is starting. The number indicates the number -of phases that have been solved so far, regardle +of phases that have been solved so far, regardless of how many have failed or +succeeded: + + -------------------------------------------------------------------------------- + SOLVE #1... + -------------------------------------------------------------------------------- + From 18f1bbdaa0de49a133fee9fa5f6fad4e4ed394d2 Mon Sep 17 00:00:00 2001 From: ajohns Date: Sat, 30 Sep 2017 10:44:20 +1000 Subject: [PATCH 088/124] wip docs --- src/rez/SOLVER.md | 46 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 43 insertions(+), 3 deletions(-) diff --git a/src/rez/SOLVER.md b/src/rez/SOLVER.md index df72a9499..e65e6cf61 100644 --- a/src/rez/SOLVER.md +++ b/src/rez/SOLVER.md @@ -125,11 +125,51 @@ There are 2 notable points missing from the pseudocode, related to optimisations Solver debugging is enabled using the *rez-env* *-v* flag. Repeat for more vebosity, to a max of *-vvv*. -This output indicates that a phase is starting. The number indicates the number -of phases that have been solved so far, regardless of how many have failed or -succeeded: +### Scope Syntax + +Before describing all the sections of output during a solve, we need to explain +the scope syntax. This describes the state of a scope, and you'll see it a lot +in solver output. 
+ +* `[foo==1.2.0]` This is a scope containing exactly one variant. In this case it + is a *null* variant (a package that has no variants). + +* `[foo-1.2.0[1]]` This is a scope containing exactly one variant. This example + shows the 1-index variant of the package foo-1.2.0 + + [foo-1.2.0[0,1]] + +This is a scope containing two variants from one package version. + + [foo-1.2.0..1.3.5(6)] + +This is a scope containing 6 variants from 6 different package versions, where +the packages are all >= 1.2.0 and <= 1.3.5. + + + + + + request: foo-1.2 bah-3 ~foo-1 + +You will see this once, at the start of the solve. It simply prints the initial +request list. + + merged_request: foo-1.2 bah-3 + +You will see this once and immediately after the `request:` output. It shows a +simplified (merged) version of the initial request. Notice here how `~foo-1` is +gone - this is because the intersection of `foo-1.2` and `~foo-1` is simply +`foo-1.2`. + + pushed {0,0}: [foo==1.2.0[0,1]]* bah[3.0.5..3.4.0(6)]* + + -------------------------------------------------------------------------------- SOLVE #1... -------------------------------------------------------------------------------- +This output indicates that a phase is starting. The number indicates the number +of phases that have been solved so far (1-indexed), regardless of how many have +failed or succeeded: From ac248bb58ec260758a9cf8bee83bba57a1852c2a Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 3 Oct 2017 14:50:58 +1100 Subject: [PATCH 089/124] -fixed bug in solver (YES) -added sortedcontainers 3rd party package (for SortedSet) -updated SOLVER.md -improved comments, varnames in solver.py What was happening: During the main solve loop, reductions happen against added and changed scopes. However one case wasn't taken into account, and that's when an existing in-conflict scope is changed into a non-conflict scope via intersection (eg, ~foo-1 becomes foo-1). 
This actually needs to trigger a reduction of foo-1 against all other scopes, because the foo scope has WIDENED in this case. This doesn't match any other situation, where if a scope changes, it's because it was NARROWED due to a reduction or intersection. --- src/rez/SOLVER.md | 143 +- src/rez/cli/env.py | 7 +- src/rez/resolved_context.py | 8 +- src/rez/resolver.py | 7 +- src/rez/solver.py | 410 ++- src/rez/utils/_version.py | 2 +- src/rez/vendor/sortedcontainers/LICENSE | 13 + src/rez/vendor/sortedcontainers/__init__.py | 52 + src/rez/vendor/sortedcontainers/sorteddict.py | 741 +++++ src/rez/vendor/sortedcontainers/sortedlist.py | 2492 +++++++++++++++++ src/rez/vendor/sortedcontainers/sortedset.py | 327 +++ 11 files changed, 4022 insertions(+), 180 deletions(-) create mode 100644 src/rez/vendor/sortedcontainers/LICENSE create mode 100644 src/rez/vendor/sortedcontainers/__init__.py create mode 100644 src/rez/vendor/sortedcontainers/sorteddict.py create mode 100644 src/rez/vendor/sortedcontainers/sortedlist.py create mode 100644 src/rez/vendor/sortedcontainers/sortedset.py diff --git a/src/rez/SOLVER.md b/src/rez/SOLVER.md index e65e6cf61..b85602c87 100644 --- a/src/rez/SOLVER.md +++ b/src/rez/SOLVER.md @@ -10,8 +10,12 @@ The solve loop performs 5 different types of operations: * **EXTRACT**. This happens when a common dependency is found in all the variants in a scope. For example if every version of pkg 'foo' depends on some version - of python, the 'extracted' dependency might be "python-2.6|2.7". An extraction - then results in either an INTERSECT or an ADD. + of python, the 'extracted' dependency might be "python-2.6|2.7". + +* **MERGE-EXTRACTIONS**. When one or more scopes are successfully *extracted*, + this results in a list of package requests. This list is then merged into a new + list, which may be unchanged, or simpler, or may cause a conflict. If a conflict + occurs then the phase is in conflict, and fails. 
* **INTERSECT**: This happens when an extracted dependency overlaps with an existing scope. For example "python-2" might be a current scope. Pkg foo's common dependency @@ -49,7 +53,8 @@ stack - if the stack is then empty, then there is no solution. ## Pseudocode -The pseudocode for a solve looks like this: +The pseudocode for a solve looks like this (and yes, you will have to read the +solver code for full appreciation of what's going on here): def solve(requests): phase = create_initial_phase(requests) @@ -58,6 +63,7 @@ The pseudocode for a solve looks like this: while not solved(): phase = phase_stack.pop() + if phase.failed: phase = phase_stack.pop() # discard previous failed phase @@ -66,6 +72,7 @@ The pseudocode for a solve looks like this: phase_stack.push(next_phase) new_phase = solve_phase(phase) + if new_phase.failed: phase_stack.push(new_phase) # we keep last fail on the stack elif new_phase.solved: @@ -77,35 +84,66 @@ The pseudocode for a solve looks like this: def solve_phase(phase): while True: + changed_scopes = [] + added_scopes = [] + widened_scopes = [] + while True: - foreach phase.scope as x: - extractions |= collect_extractions(x) - - if extractions_present: - foreach phase.scope as x: - intersect(x, extractions) - if failed(x): - set_fail() - return - elif intersected(x): - reductions |= add_reductions_involving(x) - - foreach new_request in extractions: - scope = new_scope(new_request) - reductions |= add_reductions_involving(scope) - phase.add(scope) - else: + extractions = [] + + foreach phase.scope as scope: + extractions |= collect_extractions(scope) + + if not extractions: break - if no intersections and no adds: + merge(extractions) + if in_conflict(extractions): + set_fail() + return + + foreach phase.scope as scope: + intersect(scope, extractions) + + if failed(scope): + set_fail() + return + + if was_intersected(scope): + changed_scopes.add(scope) + + if was_widened(scope): + widened_scopes.add(scope) + + # get those extractions 
involving new packages + new_extractions = get_new_extractions(extractions) + + # add them as new scopes + foreach request in new_extractions: + scope = new_scope(request) + added_scopes.add(scope) + phase.add(scope) + + if no (changed_scopes or added_scopes or widened_scopes): break - foreach scope_a, scope_b in reductions: - scope_b.reduce_by(scope_a) - if totally_reduced(scope_b): + pending_reductions = convert_to_reduction_set( + changed_scopes, added_scopes, widened_scopes) + + while pending_reductions: + scope_a, scope_b = pending_reductions.pop() + scope_a.reduce_by(scope_b) + + if totally_reduced(scope_a): set_fail() return + # scope_a changed so other scopes need to reduce against it again + if was_reduced(scope_a): + foreach phase.scope as scope: + if scope is not scope_a: + pending_reductions.add(scope, scope_a) + There are 2 notable points missing from the pseudocode, related to optimisations: * Scopes keep a set of package families so that they can quickly skip unnecessary @@ -137,18 +175,18 @@ in solver output. * `[foo-1.2.0[1]]` This is a scope containing exactly one variant. This example shows the 1-index variant of the package foo-1.2.0 - [foo-1.2.0[0,1]] - -This is a scope containing two variants from one package version. - - [foo-1.2.0..1.3.5(6)] - -This is a scope containing 6 variants from 6 different package versions, where -the packages are all >= 1.2.0 and <= 1.3.5. +* `[foo-1.2.0[0,1]]` This is a scope containing two variants from one package version. +* `foo[1.2.0..1.3.5(6)]` This is a scope containing 6 variants from 6 different + package versions, where the packages are all >= 1.2.0 and <= 1.3.5. +* `foo[1.2.0..1.3.5(6:8)]` This is a scope containing 8 variants from 6 different + package versions. +In all of the above cases, you may see a trailing `*`, eg `[foo-1.2.0[0,1]]*`. +This indicates that there are still outstanding *extractions* for this scope. 
+### Output Steps request: foo-1.2 bah-3 ~foo-1 @@ -164,7 +202,12 @@ gone - this is because the intersection of `foo-1.2` and `~foo-1` is simply pushed {0,0}: [foo==1.2.0[0,1]]* bah[3.0.5..3.4.0(6)]* +This is pushing the initial *phase* onto the *phase stack*. The `{0,0}` means +that: +* There is 1 phase in the stack (this is the zeroeth phase - phases are pushed + and popped from the bottom of the stack); +* Zero other phases have already been solved (or failed) at this depth so far. -------------------------------------------------------------------------------- SOLVE #1... @@ -172,4 +215,38 @@ gone - this is because the intersection of `foo-1.2` and `~foo-1` is simply This output indicates that a phase is starting. The number indicates the number of phases that have been solved so far (1-indexed), regardless of how many have -failed or succeeded: +failed or succeeded. + + popped {0,0}: [foo==1.2.0[0,1]]* bah[3.0.5..3.4.0(6)]* + +This is always the first thing you see after the `SOLVE #1...` output. The +topmost phase is being retrieved from the phase stack. + + EXTRACTING: + extracted python-2 from [foo==1.2.0[0,1]]* + extracted utils-1.2+ from bah[3.0.5..3.4.0(6)]* + +This lists extractions that have occurred from current scopes. + + MERGE-EXTRACTIONS: + merged extractions are: python-2 utils-1.2+ + +This shows the result of merging a set of extracted package requests into a +potentially simpler (or conflicting) set of requests. + + INTERSECTING: + python[2.7.3..3.3.0(3)] was intersected to [python==2.7.3] by range '2' + +This shows scopes that were intersected by previous extractions. + + ADDING: + added utils[1.2.0..5.2.0(12:14)]* + +This shows scopes that were added for new extractions (ie, extractions that +introduce a new package into the solve). 
+ + REDUCING: + removed blah-35.0.2[1] (dep(python-3.6) <--!--> python==2.7.3) + [blah==35.0.2[0,1]] was reduced to [blah==35.0.2[0]]* by python==2.7.3 + +This shows any reductions and the scopes that have changed as a result. diff --git a/src/rez/cli/env.py b/src/rez/cli/env.py index 37628a175..eb78afb30 100644 --- a/src/rez/cli/env.py +++ b/src/rez/cli/env.py @@ -97,6 +97,10 @@ def setup_parser(parser, completions=False): parser.add_argument( "--detached", action="store_true", help="open a separate terminal") + parser.add_argument( + "--no-passive", action="store_true", + help="only print actions that affect the solve (has an effect only " + "when verbosity is enabled)") parser.add_argument( "--pre-command", type=str, help=SUPPRESS) PKG_action = parser.add_argument( @@ -192,7 +196,8 @@ def command(opts, parser, extra_arg_groups=None): verbosity=opts.verbose, max_fails=opts.max_fails, time_limit=opts.time_limit, - caching=(not opts.no_cache)) + caching=(not opts.no_cache), + suppress_passive=opts.no_passive) success = (context.status == ResolverStatus.solved) if not success: diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 237103be7..186aa88c2 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -139,7 +139,7 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, building=False, caching=None, package_paths=None, package_filter=None, package_orderers=None, max_fails=-1, add_implicit_packages=True, time_limit=-1, callback=None, - package_load_callback=None, buf=None): + package_load_callback=None, buf=None, suppress_passive=False): """Perform a package resolve, and store the result. Args: @@ -170,6 +170,9 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, `Package` object. buf (file-like object): Where to print verbose output to, defaults to stdout. + suppress_passive (bool): If True, don't print debugging info that + has had no effect on the solve. 
This argument only has an + effect if `verbosity` > 2. """ self.load_path = None @@ -259,7 +262,8 @@ def _package_load_callback(package): callback=callback_, package_load_callback=_package_load_callback, verbosity=verbosity, - buf=buf) + buf=buf, + suppress_passive=suppress_passive) resolver.solve() # convert the results diff --git a/src/rez/resolver.py b/src/rez/resolver.py index f7ce98b01..ccaec96d4 100644 --- a/src/rez/resolver.py +++ b/src/rez/resolver.py @@ -33,7 +33,8 @@ class Resolver(object): """ def __init__(self, context, package_requests, package_paths, package_filter=None, package_orderers=None, timestamp=0, callback=None, building=False, - verbosity=False, buf=None, package_load_callback=None, caching=True): + verbosity=False, buf=None, package_load_callback=None, caching=True, + suppress_passive=False): """Create a Resolver. Args: @@ -61,6 +62,7 @@ def __init__(self, context, package_requests, package_paths, package_filter=None self.verbosity = verbosity self.caching = caching self.buf = buf + self.suppress_passive = suppress_passive # store hash of package orderers. This is used in the memcached key if package_orderers: @@ -383,7 +385,8 @@ def _solve(self): building=self.building, verbosity=self.verbosity, prune_unfailed=config.prune_failed_graph, - buf=self.buf) + buf=self.buf, + suppress_passive=self.suppress_passive) solver.solve() return solver diff --git a/src/rez/solver.py b/src/rez/solver.py index 490415865..1f88df559 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -21,9 +21,16 @@ from rez.vendor.version.requirement import VersionedObject, Requirement, \ RequirementList from rez.vendor.enum import Enum +from rez.vendor.sortedcontainers.sortedset import SortedSet import copy import time import sys +import os + + +# a hidden control for forcing to non-optimized solving mode. 
This is here as +# first port of call for narrowing down the cause of a solver bug if we see one +_force_unoptimised_solver = (os.getenv("_FORCE_REZ_UNOPTIMISED_SOLVER") == "1") class VariantSelectMode(Enum): @@ -57,9 +64,10 @@ class SolverCallbackReturn(Enum): class _Printer(object): - def __init__(self, verbosity, buf=None): + def __init__(self, verbosity, buf=None, suppress_passive=False): self.verbosity = verbosity self.buf = buf or sys.stdout + self.suppress_passive = suppress_passive self.pending_sub = None self.pending_br = False self.last_pr = True @@ -91,9 +99,11 @@ def __call__(self, txt, *args): self.last_pr = True self.pending_br = False - def important(self, txt, *args): - if self.verbosity > 1: - self.pr(txt % args) + def passive(self, txt, *args): + if self.suppress_passive: + return + + self(txt, *args) def br(self): self.pending_br = True @@ -154,7 +164,7 @@ def __eq__(self, other): self.conflicting_request == other.conflicting_request) def __str__(self): - return "%s --> %s <--!--> %s)" \ + return "%s (dep(%s) <--!--> %s)" \ % (self.reducee_str(), self.dependency, self.conflicting_request) @@ -589,7 +599,7 @@ def intersect(self, range_): return self if self.pr: - self.pr("intersecting %s wrt range '%s'...", self, range_) + self.pr.passive("intersecting %s wrt range '%s'...", self, range_) # this is faster than iter_intersecting :( entries = [x for x in self.entries if x.version in range_] @@ -621,7 +631,7 @@ def reduce_by(self, package_request): if self.pr: reqstr = _short_req_str(package_request) - self.pr("reducing %s wrt %s...", self, reqstr) + self.pr.passive("reducing %s wrt %s...", self, reqstr) entries = [] reductions = [] @@ -650,10 +660,7 @@ def _conflicts(req_): reductions.append(red) if self.pr: - self.pr("removed %s (dep(%s) <--!--> %s)", - red.reducee_str(), - red.dependency, - red.conflicting_request) + self.pr("removed %s", red) else: new_variants.append(variant) @@ -951,19 +958,23 @@ def intersect(self, range_): else: new_slice = 
self.variant_slice.intersect(range_) + # intersection reduced the scope to nothing if new_slice is None: if self.pr: self.pr("%s intersected with range '%s' resulted in no packages", self, range_) return None - elif new_slice is not self.variant_slice: + + # intersection narrowed the scope + if new_slice is not self.variant_slice: scope = self._copy(new_slice) if self.pr: self.pr("%s was intersected to %s by range '%s'", self, scope, range_) return scope - else: - return self + + # intersection did not change the scope + return self def reduce_by(self, package_request): """Reduce this scope wrt a package request. @@ -971,26 +982,41 @@ def reduce_by(self, package_request): Returns: A (_PackageScope, [Reduction]) tuple, where the scope is a new scope copy with reductions applied, or self if there were no - reductions, or None if the slice was completely reduced. + reductions, or None if the scope was completely reduced. """ - if not self.package_request.conflict: - new_slice, reductions = self.variant_slice.reduce_by(package_request) + if self.package_request.conflict: + # conflict scopes don't reduce. Instead, other scopes will be + # reduced against a conflict scope. 
+ return (self, []) - if new_slice is None: - if self.pr: - reqstr = _short_req_str(package_request) - self.pr("%s was reduced to nothing by %s", self, reqstr) - self.pr.br() - return (None, reductions) - elif new_slice is not self.variant_slice: - scope = self._copy(new_slice) + self.solver.reduction_tests_count += 1 - if self.pr: - reqstr = _short_req_str(package_request) - self.pr("%s was reduced to %s by %s", self, scope, reqstr) - self.pr.br() - return (scope, reductions) + # perform the reduction + new_slice, reductions = self.variant_slice.reduce_by(package_request) + # there was total reduction + if new_slice is None: + self.solver.reductions_count += 1 + + if self.pr: + reqstr = _short_req_str(package_request) + self.pr("%s was reduced to nothing by %s", self, reqstr) + self.pr.br() + + return (None, reductions) + + # there was some reduction + if new_slice is not self.variant_slice: + self.solver.reductions_count += 1 + scope = self._copy(new_slice) + + if self.pr: + reqstr = _short_req_str(package_request) + self.pr("%s was reduced to %s by %s", self, scope, reqstr) + self.pr.br() + return (scope, reductions) + + # there was no reduction return (self, []) def extract(self): @@ -1058,8 +1084,10 @@ def _update(self): self.package_name, self.variant_slice.range_) def __str__(self): - return str(self.variant_slice) if self.variant_slice \ - else str(self.package_request) + if self.variant_slice is None: + return "req(%s)" % str(self.package_request) + else: + return str(self.variant_slice) def _get_dependency_order(g, node_list): @@ -1112,11 +1140,8 @@ def __init__(self, solver): scope = _PackageScope(package_request, solver=solver) self.scopes.append(scope) - self.pending_reducts = set() - for i in range(len(self.scopes)): - for j in range(len(self.scopes)): - if i != j: - self.pending_reducts.add((i, j)) + # only so an initial reduction across all scopes happens in a new phase + self.changed_scopes_i = set(range(len(self.scopes))) @property def pr(self): 
@@ -1130,14 +1155,15 @@ def solve(self): scopes = self.scopes[:] failure_reason = None extractions = {} - pending_reducts = self.pending_reducts.copy() + + changed_scopes_i = self.changed_scopes_i.copy() def _create_phase(status=None): phase = copy.copy(self) phase.scopes = scopes phase.failure_reason = failure_reason phase.extractions = extractions - phase.pending_reducts = set() + phase.changed_scopes_i = set() if status is None: phase.status = (SolverStatus.solved if phase._is_solved() @@ -1146,115 +1172,174 @@ def _create_phase(status=None): phase.status = status return phase + # iteratively reduce until no more reductions possible while True: + prev_num_scopes = len(scopes) + widened_scopes_i = set() + # iteratively extract until no more extractions possible while True: self.pr.subheader("EXTRACTING:") - common_requests = [] + extracted_requests = [] + # perform all possible extractions for i in range(len(scopes)): while True: - scope_, common_request = scopes[i].extract() - if common_request: - common_requests.append(common_request) - k = (scopes[i].package_name, common_request.name) - extractions[k] = common_request + scope_, extracted_request = scopes[i].extract() + + if extracted_request: + extracted_requests.append(extracted_request) + k = (scopes[i].package_name, extracted_request.name) + extractions[k] = extracted_request + self.solver.extractions_count += 1 scopes[i] = scope_ else: break - if common_requests: - request_list = RequirementList(common_requests) + if not extracted_requests: + break - if request_list.conflict: # extractions are in conflict - req1, req2 = request_list.conflict - conflict = DependencyConflict(req1, req2) - failure_reason = DependencyConflicts([conflict]) - return _create_phase(SolverStatus.failed) - else: + # simplify extractions (there may be overlaps) + self.pr.subheader("MERGE-EXTRACTIONS:") + extracted_requests = RequirementList(extracted_requests) + + if extracted_requests.conflict: # extractions are in conflict + req1, 
req2 = extracted_requests.conflict + conflict = DependencyConflict(req1, req2) + failure_reason = DependencyConflicts([conflict]) + return _create_phase(SolverStatus.failed) + elif self.pr: + self.pr("merged extractions: %s", extracted_requests) + + # intersect extracted requests with current scopes + self.pr.subheader("INTERSECTING:") + req_fams = [] + + for i, scope in enumerate(scopes): + extracted_req = extracted_requests.get(scope.package_name) + + if extracted_req is not None: + # perform the intersection + scope_ = scope.intersect(extracted_req.range) + + req_fams.append(extracted_req.name) + + if scope_ is None: + # the scope conflicted with the extraction + conflict = DependencyConflict( + extracted_req, scope.package_request) + failure_reason = DependencyConflicts([conflict]) + return _create_phase(SolverStatus.failed) + + elif scope_ is not scope: + # the scope was narrowed because it intersected + # with an extraction + scopes[i] = scope_ + changed_scopes_i.add(i) + self.solver.intersections_count += 1 + + # if the intersection caused a conflict scope to turn + # into a non-conflict scope, then it has to be reduced + # against all other scopes. + # + # In the very common case, if a scope changes then it + # has been narrowed, so there is no need to reduce it + # against other unchanged scopes. In this case however, + # the scope actually widens! For eg, '~foo-1' may be + # intersected with 'foo' to become 'foo-1', which might + # then reduce against existing scopes. 
+ # + if scope.is_conflict and not scope_.is_conflict: + widened_scopes_i.add(i) + + # add new scopes + new_extracted_reqs = [ + x for x in extracted_requests.requirements + if x.name not in req_fams] + + if new_extracted_reqs: + self.pr.subheader("ADDING:") + #n = len(scopes) + + for req in new_extracted_reqs: + scope = _PackageScope(req, solver=self.solver) + scopes.append(scope) if self.pr: - self.pr("merged extractions are: %s", request_list) - - # do intersections with existing scopes - self.pr.subheader("INTERSECTING:") - req_fams = [] - - for i, scope in enumerate(scopes): - req = request_list.get(scope.package_name) - if req is not None: - scope_ = scope.intersect(req.range) - req_fams.append(req.name) - - if scope_ is None: - conflict = DependencyConflict( - req, scope.package_request) - failure_reason = DependencyConflicts([conflict]) - return _create_phase(SolverStatus.failed) - elif scope_ is not scope: - scopes[i] = scope_ - for j in range(len(scopes)): - if j != i: - pending_reducts.add((i, j)) - - # add new scopes - new_reqs = [x for x in request_list.requirements - if x.name not in req_fams] - - if new_reqs: - self.pr.subheader("ADDING:") - n = len(scopes) - - for req in new_reqs: - scope = _PackageScope(req, solver=self.solver) - scopes.append(scope) - if self.pr: - self.pr("added %s", scope) - - m = len(new_reqs) - for i in range(n, n + m): - for j in range(n + m): - if i != j: - pending_reducts.add((i, j)) - - for i in range(n): - for j in range(n, n + m): - pending_reducts.add((i, j)) - else: - break + self.pr("added %s", scope) + + num_scopes = len(scopes) - if not pending_reducts: + # no further reductions to do + if (num_scopes == prev_num_scopes) \ + and not changed_scopes_i \ + and not widened_scopes_i: break # iteratively reduce until no more reductions possible self.pr.subheader("REDUCING:") if not self.solver.optimised: - # check all variants for reduction regardless - pending_reducts = set() - for i in range(len(scopes)): - for j in 
range(len(scopes)): - if i != j: - pending_reducts.add((i, j)) - + # force reductions across all scopes + changed_scopes_i = set(range(num_scopes)) + prev_num_scopes = num_scopes + + # create set of pending reductions from the list of changed scopes + # and list of added scopes. We use a sorted set because the solver + # must be deterministic, ie its behavior must always be the same for + # a given solve. A normal set does not guarantee order. + # + # Each item is an (x, y) tuple, where scope[x] will reduce by + # scope[y].package_request. + # + pending_reducts = SortedSet() + all_scopes_i = range(num_scopes) + added_scopes_i = range(prev_num_scopes, num_scopes) + + for x in range(prev_num_scopes): + # existing scopes must reduce against changed scopes + for y in changed_scopes_i: + if x != y: + pending_reducts.add((x, y)) + + # existing scopes must reduce against newly added scopes + for y in added_scopes_i: + pending_reducts.add((x, y)) + + # newly added scopes must reduce against all other scopes + for x in added_scopes_i: + for y in all_scopes_i: + if x != y: + pending_reducts.add((x, y)) + + # 'widened' scopes (see earlier comment in this func) must reduce + # against all other scopes + for x in widened_scopes_i: + for y in all_scopes_i: + if x != y: + pending_reducts.add((x, y)) + + # iteratively reduce until there are no more pending reductions. + # Note that if a scope is reduced, then other scopes need to reduce + # against it once again. 
while pending_reducts: - new_pending_reducts = set() + x, y = pending_reducts.pop() + + new_scope, reductions = scopes[x].reduce_by( + scopes[y].package_request) + + if new_scope is None: + failure_reason = TotalReduction(reductions) + return _create_phase(SolverStatus.failed) - # the sort here gives reproducible results, since order of - # reducts affects the result - for i, j in sorted(pending_reducts): - new_scope, reductions = scopes[j].reduce_by( - scopes[i].package_request) + elif new_scope is not scopes[x]: + scopes[x] = new_scope - if new_scope is None: - failure_reason = TotalReduction(reductions) - return _create_phase(SolverStatus.failed) - elif new_scope is not scopes[j]: - scopes[j] = new_scope - for i in range(len(scopes)): - if i != j: - new_pending_reducts.add((j, i)) + # other scopes need to reduce against x again + for j in all_scopes_i: + if j != x: + pending_reducts.add((j, x)) - pending_reducts = new_pending_reducts + changed_scopes_i = set() return _create_phase() @@ -1326,28 +1411,33 @@ def split(self): scopes = [] next_scopes = [] - split = None + split_i = None for i, scope in enumerate(self.scopes): - if split is None: + if split_i is None: r = scope.split() if r is not None: scope_, next_scope = r scopes.append(scope_) next_scopes.append(next_scope) - split = i + split_i = i continue scopes.append(scope) next_scopes.append(scope) + assert split_i is not None + phase = copy.copy(self) phase.scopes = scopes phase.status = SolverStatus.pending + phase.changed_scopes_i = set([split_i]) - for i in range(len(phase.scopes)): - if i != split: - phase.pending_reducts.add((split, i)) + # because a scope was narrowed by a split, other scopes need to be + # reduced against it + #for i in range(len(phase.scopes)): + # if i != split_i: + # phase.pending_reducts.add((i, split_i)) next_phase = copy.copy(phase) next_phase.scopes = next_scopes @@ -1648,7 +1738,8 @@ class Solver(_Common): def __init__(self, package_requests, package_paths, context=None, 
package_filter=None, package_orderers=None, callback=None, building=False, optimised=True, verbosity=0, buf=None, - package_load_callback=None, prune_unfailed=True): + package_load_callback=None, prune_unfailed=True, + suppress_passive=False): """Create a Solver. Args: @@ -1677,12 +1768,14 @@ def __init__(self, package_requests, package_paths, context=None, prune_unfailed (bool): If the solve failed, and `prune_unfailed` is True, any packages unrelated to the conflict are removed from the graph. + suppress_passive (bool): If True, don't print debugging info that + has had no effect on the solve. This argument only has an + effect if `verbosity` > 2. """ self.package_paths = package_paths self.package_filter = package_filter self.package_orderers = package_orderers - self.pr = _Printer(verbosity, buf=buf) - self.optimised = optimised + self.pr = _Printer(verbosity, buf=buf, suppress_passive=suppress_passive) self.callback = callback self.prune_unfailed = prune_unfailed self.package_load_callback = package_load_callback @@ -1690,6 +1783,11 @@ def __init__(self, package_requests, package_paths, context=None, self.request_list = None self.context = context + if _force_unoptimised_solver: + self.optimised = False + else: + self.optimised = optimised + self.non_conflict_package_requests = [x for x in package_requests if not x.conflict] @@ -1697,11 +1795,18 @@ def __init__(self, package_requests, package_paths, context=None, self.failed_phase_list = None self.abort_reason = None self.callback_return = None - self.solve_count = None self.depth_counts = None self.solve_time = None self.load_time = None self.solve_begun = None + + # advanced solve metrics + self.solve_count = 0 + self.extractions_count = 0 + self.intersections_count = 0 + self.reductions_count = 0 + self.reduction_tests_count = 0 + self._init() self.package_cache = PackageVariantCache(self) @@ -1815,6 +1920,21 @@ def solve(self): self.load_time = package_repo_stats.package_load_time - pt1 self.solve_time = 
time.time() - t1 + # print stats + if self.pr: + d = { + "num_solves": self.solve_count, + "num_fails": self.num_fails, + "num_extractions": self.extractions_count, + "num_intersections": self.intersections_count, + "num_reductions": self.reductions_count, + "num_reduction_tests": self.reduction_tests_count + } + + from pprint import pformat + self.pr.subheader("SOLVE STATS:") + self.pr(pformat(d)) + def solve_step(self): """Perform a single solve step. """ @@ -1823,7 +1943,9 @@ def solve_step(self): return if self.pr: - self.pr.header("SOLVE #%d...", self.solve_count + 1) + self.pr.header("SOLVE #%d (%d fails so far)...", + self.solve_count + 1, self.num_fails) + phase = self._pop_phase() if phase.status == SolverStatus.failed: # a previously failed phase @@ -1840,15 +1962,16 @@ def solve_step(self): new_phase = phase.solve() self.solve_count += 1 - self.pr.subheader("RESULT:") if new_phase.status == SolverStatus.failed: - self.pr("phase failed to resolve") + self.pr.subheader("FAILED:") self._push_phase(new_phase) if self.pr and len(self.phase_stack) == 1: self.pr.header("FAIL: there is no solution") + elif new_phase.status == SolverStatus.solved: # solved, but there may be cyclic dependencies + self.pr.subheader("SOLVED:") final_phase = new_phase.finalise() self._push_phase(final_phase) @@ -1859,12 +1982,11 @@ def solve_step(self): self.pr.header("SUCCESS") self.pr("solve time: %.2f seconds", self.solve_time) self.pr("load time: %.2f seconds", self.load_time) + else: + self.pr.subheader("EXHAUSTED:") assert(new_phase.status == SolverStatus.exhausted) self._push_phase(new_phase) - if self.pr: - s = SolverState(self.num_solves, self.num_fails, new_phase) - self.pr.important(str(s)) def failure_reason(self, failure_index=None): """Get the reason for a failure. 
@@ -1964,10 +2086,16 @@ def dump(self): def _init(self): self.phase_stack = [] self.failed_phase_list = [] - self.solve_count = 0 self.depth_counts = {} self.solve_time = 0.0 self.load_time = 0.0 + + self.solve_count = 0 + self.extractions_count = 0 + self.intersections_count = 0 + self.reductions_count = 0 + self.reduction_tests_count = 0 + self.solve_begun = False def _latest_nonfailed_phase(self): diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 90543bdae..5f272621f 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.13.1" +_rez_version = "2.14.0" try: from rez.vendor.version.version import Version diff --git a/src/rez/vendor/sortedcontainers/LICENSE b/src/rez/vendor/sortedcontainers/LICENSE new file mode 100644 index 000000000..8794014e0 --- /dev/null +++ b/src/rez/vendor/sortedcontainers/LICENSE @@ -0,0 +1,13 @@ +Copyright 2014-2016 Grant Jenks + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/src/rez/vendor/sortedcontainers/__init__.py b/src/rez/vendor/sortedcontainers/__init__.py new file mode 100644 index 000000000..54b2bf67f --- /dev/null +++ b/src/rez/vendor/sortedcontainers/__init__.py @@ -0,0 +1,52 @@ +"""Sorted Container Types: SortedList, SortedDict, SortedSet + +SortedContainers is an Apache2 licensed containers library, written in +pure-Python, and fast as C-extensions. 
+ + +Python's standard library is great until you need a sorted collections +type. Many will attest that you can get really far without one, but the moment +you **really need** a sorted list, dict, or set, you're faced with a dozen +different implementations, most using C-extensions without great documentation +and benchmarking. + +In Python, we can do better. And we can do it in pure-Python! + +:: + + >>> from sortedcontainers import SortedList, SortedDict, SortedSet + >>> sl = SortedList(xrange(10000000)) + >>> 1234567 in sl + True + >>> sl[7654321] + 7654321 + >>> sl.add(1234567) + >>> sl.count(1234567) + 2 + >>> sl *= 3 + >>> len(sl) + 30000003 + +SortedContainers takes all of the work out of Python sorted types - making your +deployment and use of Python easy. There's no need to install a C compiler or +pre-build and distribute custom extensions. Performance is a feature and +testing has 100% coverage with unit tests and hours of stress. + +:copyright: (c) 2016 by Grant Jenks. +:license: Apache 2.0, see LICENSE for more details. + +""" + + +from .sortedlist import SortedList, SortedListWithKey +from .sortedset import SortedSet +from .sorteddict import SortedDict + +__all__ = ['SortedList', 'SortedSet', 'SortedDict', 'SortedListWithKey'] + +__title__ = 'sortedcontainers' +__version__ = '1.5.7' +__build__ = 0x010507 +__author__ = 'Grant Jenks' +__license__ = 'Apache 2.0' +__copyright__ = 'Copyright 2016 Grant Jenks' diff --git a/src/rez/vendor/sortedcontainers/sorteddict.py b/src/rez/vendor/sortedcontainers/sorteddict.py new file mode 100644 index 000000000..5d425fee6 --- /dev/null +++ b/src/rez/vendor/sortedcontainers/sorteddict.py @@ -0,0 +1,741 @@ +"""Sorted dictionary implementation. 
+ +""" + +from collections import Set, Sequence +from collections import KeysView as AbstractKeysView +from collections import ValuesView as AbstractValuesView +from collections import ItemsView as AbstractItemsView +from sys import hexversion + +from .sortedlist import SortedList, recursive_repr, SortedListWithKey +from .sortedset import SortedSet + +NONE = object() + + +class _IlocWrapper(object): + "Positional indexing support for sorted dictionary objects." + # pylint: disable=protected-access, too-few-public-methods + def __init__(self, _dict): + self._dict = _dict + def __len__(self): + return len(self._dict) + def __getitem__(self, index): + """ + Very efficiently return the key at index *index* in iteration. Supports + negative indices and slice notation. Raises IndexError on invalid + *index*. + """ + return self._dict._list[index] + def __delitem__(self, index): + """ + Remove the ``sdict[sdict.iloc[index]]`` from *sdict*. Supports negative + indices and slice notation. Raises IndexError on invalid *index*. + """ + _dict = self._dict + _list = _dict._list + _delitem = _dict._delitem + + if isinstance(index, slice): + keys = _list[index] + del _list[index] + for key in keys: + _delitem(key) + else: + key = _list[index] + del _list[index] + _delitem(key) + + +class SortedDict(dict): + """SortedDict provides the same methods as a dict. Additionally, SortedDict + efficiently maintains its keys in sorted order. Consequently, the keys + method will return the keys in sorted order, the popitem method will remove + the item with the highest key, etc. + + """ + def __init__(self, *args, **kwargs): + """SortedDict provides the same methods as a dict. Additionally, SortedDict + efficiently maintains its keys in sorted order. Consequently, the keys + method will return the keys in sorted order, the popitem method will + remove the item with the highest key, etc. 
+ + An optional *key* argument defines a callable that, like the `key` + argument to Python's `sorted` function, extracts a comparison key from + each dict key. If no function is specified, the default compares the + dict keys directly. The `key` argument must be provided as a positional + argument and must come before all other arguments. + + An optional *iterable* argument provides an initial series of items to + populate the SortedDict. Each item in the series must itself contain + two items. The first is used as a key in the new dictionary, and the + second as the key's value. If a given key is seen more than once, the + last value associated with it is retained in the new dictionary. + + If keyword arguments are given, the keywords themselves with their + associated values are added as items to the dictionary. If a key is + specified both in the positional argument and as a keyword argument, the + value associated with the keyword is retained in the dictionary. For + example, these all return a dictionary equal to ``{"one": 2, "two": + 3}``: + + * ``SortedDict(one=2, two=3)`` + * ``SortedDict({'one': 2, 'two': 3})`` + * ``SortedDict(zip(('one', 'two'), (2, 3)))`` + * ``SortedDict([['two', 3], ['one', 2]])`` + + The first example only works for keys that are valid Python + identifiers; the others work with any valid keys. + + """ + # pylint: disable=super-init-not-called + if args and (args[0] is None or callable(args[0])): + self._key = args[0] + args = args[1:] + else: + self._key = None + + if self._key is None: + self._list = SortedList() + else: + self._list = SortedListWithKey(key=self._key) + + # Cache function pointers to dict methods. 
+ + _dict = super(SortedDict, self) + self._dict = _dict + self._clear = _dict.clear + self._delitem = _dict.__delitem__ + self._iter = _dict.__iter__ + self._pop = _dict.pop + self._setdefault = _dict.setdefault + self._setitem = _dict.__setitem__ + self._dict_update = _dict.update + + # Cache function pointers to SortedList methods. + + _list = self._list + self._list_add = _list.add + self.bisect_left = _list.bisect_left + self.bisect = _list.bisect_right + self.bisect_right = _list.bisect_right + self._list_clear = _list.clear + self.index = _list.index + self._list_pop = _list.pop + self._list_remove = _list.remove + self._list_update = _list.update + self.irange = _list.irange + self.islice = _list.islice + self._reset = _list._reset # pylint: disable=protected-access + + if self._key is not None: + self.bisect_key_left = _list.bisect_key_left + self.bisect_key_right = _list.bisect_key_right + self.bisect_key = _list.bisect_key + self.irange_key = _list.irange_key + + self.iloc = _IlocWrapper(self) + + self._update(*args, **kwargs) + + @property + def key(self): + """Key function used to extract comparison key for sorting.""" + return self._key + + def clear(self): + """Remove all elements from the dictionary.""" + self._clear() + self._list_clear() + + def __delitem__(self, key): + """ + Remove ``d[key]`` from *d*. Raises a KeyError if *key* is not in the + dictionary. + """ + self._delitem(key) + self._list_remove(key) + + def __iter__(self): + """ + Return an iterator over the sorted keys of the dictionary. + + Iterating the Mapping while adding or deleting keys may raise a + `RuntimeError` or fail to iterate over all entries. + """ + return iter(self._list) + + def __reversed__(self): + """ + Return a reversed iterator over the sorted keys of the dictionary. + + Iterating the Mapping while adding or deleting keys may raise a + `RuntimeError` or fail to iterate over all entries. 
+ """ + return reversed(self._list) + + def __setitem__(self, key, value): + """Set `d[key]` to *value*.""" + if key not in self: + self._list_add(key) + self._setitem(key, value) + + def copy(self): + """Return a shallow copy of the sorted dictionary.""" + return self.__class__(self._key, self._iteritems()) + + __copy__ = copy + + @classmethod + def fromkeys(cls, seq, value=None): + """ + Create a new dictionary with keys from *seq* and values set to *value*. + """ + return cls((key, value) for key in seq) + + if hexversion < 0x03000000: + def items(self): + """ + Return a list of the dictionary's items (``(key, value)`` pairs). + """ + return list(self._iteritems()) + else: + def items(self): + """ + Return a new ItemsView of the dictionary's items. In addition to + the methods provided by the built-in `view` the ItemsView is + indexable (e.g. ``d.items()[5]``). + """ + return ItemsView(self) + + def iteritems(self): + """ + Return an iterator over the items (``(key, value)`` pairs). + + Iterating the Mapping while adding or deleting keys may raise a + `RuntimeError` or fail to iterate over all entries. + """ + return iter((key, self[key]) for key in self._list) + + _iteritems = iteritems + + if hexversion < 0x03000000: + def keys(self): + """Return a SortedSet of the dictionary's keys.""" + return SortedSet(self._list, key=self._key) + else: + def keys(self): + """ + Return a new KeysView of the dictionary's keys. In addition to the + methods provided by the built-in `view` the KeysView is indexable + (e.g. ``d.keys()[5]``). + """ + return KeysView(self) + + def iterkeys(self): + """ + Return an iterator over the sorted keys of the Mapping. + + Iterating the Mapping while adding or deleting keys may raise a + `RuntimeError` or fail to iterate over all entries. 
+ """ + return iter(self._list) + + if hexversion < 0x03000000: + def values(self): + """Return a list of the dictionary's values.""" + return list(self._itervalues()) + else: + def values(self): + """ + Return a new :class:`ValuesView` of the dictionary's values. + In addition to the methods provided by the built-in `view` the + ValuesView is indexable (e.g., ``d.values()[5]``). + """ + return ValuesView(self) + + def itervalues(self): + """ + Return an iterator over the values of the Mapping. + + Iterating the Mapping while adding or deleting keys may raise a + `RuntimeError` or fail to iterate over all entries. + """ + return iter(self[key] for key in self._list) + + _itervalues = itervalues + + def pop(self, key, default=NONE): + """ + If *key* is in the dictionary, remove it and return its value, + else return *default*. If *default* is not given and *key* is not in + the dictionary, a KeyError is raised. + """ + if key in self: + self._list_remove(key) + return self._pop(key) + else: + if default is NONE: + raise KeyError(key) + else: + return default + + def popitem(self, last=True): + """ + Remove and return a ``(key, value)`` pair from the dictionary. If + last=True (default) then remove the *greatest* `key` from the + diciontary. Else, remove the *least* key from the dictionary. + + If the dictionary is empty, calling `popitem` raises a + KeyError`. + """ + if not self: + raise KeyError('popitem(): dictionary is empty') + + key = self._list_pop(-1 if last else 0) + value = self._pop(key) + + return (key, value) + + def peekitem(self, index=-1): + """Return (key, value) item pair at index. + + Unlike ``popitem``, the sorted dictionary is not modified. Index + defaults to -1, the last/greatest key in the dictionary. Specify + ``index=0`` to lookup the first/least key in the dictiony. + + If index is out of range, raise IndexError. 
+ + """ + key = self._list[index] + return key, self[key] + + def setdefault(self, key, default=None): + """ + If *key* is in the dictionary, return its value. If not, insert *key* + with a value of *default* and return *default*. *default* defaults to + ``None``. + """ + if key in self: + return self[key] + + self._setitem(key, default) + self._list_add(key) + return default + + def update(self, *args, **kwargs): + """ + Update the dictionary with the key/value pairs from *other*, overwriting + existing keys. + + *update* accepts either another dictionary object or an iterable of + key/value pairs (as a tuple or other iterable of length two). If + keyword arguments are specified, the dictionary is then updated with + those key/value pairs: ``d.update(red=1, blue=2)``. + """ + if not self: + self._dict_update(*args, **kwargs) + self._list_update(self._iter()) + return + + if not kwargs and len(args) == 1 and isinstance(args[0], dict): + pairs = args[0] + else: + pairs = dict(*args, **kwargs) + + if (10 * len(pairs)) > len(self): + self._dict_update(pairs) + self._list_clear() + self._list_update(self._iter()) + else: + for key in pairs: + self[key] = pairs[key] + + _update = update + + if hexversion >= 0x02070000: + def viewkeys(self): + "Return ``KeysView`` of dictionary keys." + return KeysView(self) + + def viewvalues(self): + "Return ``ValuesView`` of dictionary values." + return ValuesView(self) + + def viewitems(self): + "Return ``ItemsView`` of dictionary (key, value) item pairs." 
+ return ItemsView(self) + + def __reduce__(self): + return (self.__class__, (self._key, list(self._iteritems()))) + + @recursive_repr + def __repr__(self): + _key = self._key + name = type(self).__name__ + key = '' if _key is None else '{0!r}, '.format(_key) + func = '{0!r}: {1!r}'.format + items = ', '.join(func(key, self[key]) for key in self._list) + return '{0}({1}{{{2}}})'.format(name, key, items) + + def _check(self): + # pylint: disable=protected-access + self._list._check() + assert len(self) == len(self._list) + assert all(key in self for key in self._list) + + +class KeysView(AbstractKeysView, Set, Sequence): + """ + A KeysView object is a dynamic view of the dictionary's keys, which + means that when the dictionary's keys change, the view reflects + those changes. + + The KeysView class implements the Set and Sequence Abstract Base Classes. + """ + # pylint: disable=too-many-ancestors + if hexversion < 0x03000000: + def __init__(self, sorted_dict): + """ + Initialize a KeysView from a SortedDict container as *sorted_dict*. + """ + # pylint: disable=super-init-not-called, protected-access + self._list = sorted_dict._list + self._view = sorted_dict._dict.viewkeys() + else: + def __init__(self, sorted_dict): + """ + Initialize a KeysView from a SortedDict container as *sorted_dict*. + """ + # pylint: disable=super-init-not-called, protected-access + self._list = sorted_dict._list + self._view = sorted_dict._dict.keys() + def __len__(self): + """Return the number of entries in the dictionary.""" + return len(self._view) + def __contains__(self, key): + """ + Return True if and only if *key* is one of the underlying dictionary's + keys. + """ + return key in self._view + def __iter__(self): + """ + Return an iterable over the keys in the dictionary. Keys are iterated + over in their sorted order. + + Iterating views while adding or deleting entries in the dictionary may + raise a `RuntimeError` or fail to iterate over all entries. 
+ """ + return iter(self._list) + def __getitem__(self, index): + """Return the key at position *index*.""" + return self._list[index] + def __reversed__(self): + """ + Return a reversed iterable over the keys in the dictionary. Keys are + iterated over in their reverse sort order. + + Iterating views while adding or deleting entries in the dictionary may + raise a RuntimeError or fail to iterate over all entries. + """ + return reversed(self._list) + def index(self, value, start=None, stop=None): + """ + Return the smallest *k* such that `keysview[k] == value` and `start <= k + < end`. Raises `KeyError` if *value* is not present. *stop* defaults + to the end of the set. *start* defaults to the beginning. Negative + indexes are supported, as for slice indices. + """ + # pylint: disable=arguments-differ + return self._list.index(value, start, stop) + def count(self, value): + """Return the number of occurrences of *value* in the set.""" + return 1 if value in self._view else 0 + def __eq__(self, that): + """Test set-like equality with *that*.""" + return self._view == that + def __ne__(self, that): + """Test set-like inequality with *that*.""" + return self._view != that + def __lt__(self, that): + """Test whether self is a proper subset of *that*.""" + return self._view < that + def __gt__(self, that): + """Test whether self is a proper superset of *that*.""" + return self._view > that + def __le__(self, that): + """Test whether self is contained within *that*.""" + return self._view <= that + def __ge__(self, that): + """Test whether *that* is contained within self.""" + return self._view >= that + def __and__(self, that): + """Return a SortedSet of the intersection of self and *that*.""" + return SortedSet(self._view & that) + def __or__(self, that): + """Return a SortedSet of the union of self and *that*.""" + return SortedSet(self._view | that) + def __sub__(self, that): + """Return a SortedSet of the difference of self and *that*.""" + return 
SortedSet(self._view - that) + def __xor__(self, that): + """Return a SortedSet of the symmetric difference of self and *that*.""" + return SortedSet(self._view ^ that) + if hexversion < 0x03000000: + def isdisjoint(self, that): + """Return True if and only if *that* is disjoint with self.""" + # pylint: disable=arguments-differ + return not any(key in self._list for key in that) + else: + def isdisjoint(self, that): + """Return True if and only if *that* is disjoint with self.""" + # pylint: disable=arguments-differ + return self._view.isdisjoint(that) + @recursive_repr + def __repr__(self): + return 'SortedDict_keys({0!r})'.format(list(self)) + + +class ValuesView(AbstractValuesView, Sequence): + """ + A ValuesView object is a dynamic view of the dictionary's values, which + means that when the dictionary's values change, the view reflects those + changes. + + The ValuesView class implements the Sequence Abstract Base Class. + """ + # pylint: disable=too-many-ancestors + if hexversion < 0x03000000: + def __init__(self, sorted_dict): + """ + Initialize a ValuesView from a SortedDict container as + *sorted_dict*. + """ + # pylint: disable=super-init-not-called, protected-access + self._dict = sorted_dict + self._list = sorted_dict._list + self._view = sorted_dict._dict.viewvalues() + else: + def __init__(self, sorted_dict): + """ + Initialize a ValuesView from a SortedDict container as + *sorted_dict*. + """ + # pylint: disable=super-init-not-called, protected-access + self._dict = sorted_dict + self._list = sorted_dict._list + self._view = sorted_dict._dict.values() + def __len__(self): + """Return the number of entries in the dictionary.""" + return len(self._dict) + def __contains__(self, value): + """ + Return True if and only if *value* is in the underlying Mapping's + values. + """ + return value in self._view + def __iter__(self): + """ + Return an iterator over the values in the dictionary. Values are + iterated over in sorted order of the keys. 
+ + Iterating views while adding or deleting entries in the dictionary may + raise a `RuntimeError` or fail to iterate over all entries. + """ + _dict = self._dict + return iter(_dict[key] for key in self._list) + def __getitem__(self, index): + """ + Efficiently return value at *index* in iteration. + + Supports slice notation and negative indexes. + """ + _dict, _list = self._dict, self._list + if isinstance(index, slice): + return [_dict[key] for key in _list[index]] + return _dict[_list[index]] + def __reversed__(self): + """ + Return a reverse iterator over the values in the dictionary. Values are + iterated over in reverse sort order of the keys. + + Iterating views while adding or deleting entries in the dictionary may + raise a `RuntimeError` or fail to iterate over all entries. + """ + _dict = self._dict + return iter(_dict[key] for key in reversed(self._list)) + def index(self, value): + """ + Return index of *value* in self. + + Raises ValueError if *value* is not found. + """ + # pylint: disable=arguments-differ + for idx, val in enumerate(self): + if value == val: + return idx + raise ValueError('{0!r} is not in dict'.format(value)) + if hexversion < 0x03000000: + def count(self, value): + """Return the number of occurrences of *value* in self.""" + return sum(1 for val in self._dict.itervalues() if val == value) + else: + def count(self, value): + """Return the number of occurrences of *value* in self.""" + return sum(1 for val in self._dict.values() if val == value) + def __lt__(self, that): + raise TypeError + def __gt__(self, that): + raise TypeError + def __le__(self, that): + raise TypeError + def __ge__(self, that): + raise TypeError + def __and__(self, that): + raise TypeError + def __or__(self, that): + raise TypeError + def __sub__(self, that): + raise TypeError + def __xor__(self, that): + raise TypeError + @recursive_repr + def __repr__(self): + return 'SortedDict_values({0!r})'.format(list(self)) + + +class ItemsView(AbstractItemsView, 
Set, Sequence): + """ + An ItemsView object is a dynamic view of the dictionary's ``(key, + value)`` pairs, which means that when the dictionary changes, the + view reflects those changes. + + The ItemsView class implements the Set and Sequence Abstract Base Classes. + However, the set-like operations (``&``, ``|``, ``-``, ``^``) will only + operate correctly if all of the dictionary's values are hashable. + """ + # pylint: disable=too-many-ancestors + if hexversion < 0x03000000: + def __init__(self, sorted_dict): + """ + Initialize an ItemsView from a SortedDict container as + *sorted_dict*. + """ + # pylint: disable=super-init-not-called, protected-access + self._dict = sorted_dict + self._list = sorted_dict._list + self._view = sorted_dict._dict.viewitems() + else: + def __init__(self, sorted_dict): + """ + Initialize an ItemsView from a SortedDict container as + *sorted_dict*. + """ + # pylint: disable=super-init-not-called, protected-access + self._dict = sorted_dict + self._list = sorted_dict._list + self._view = sorted_dict._dict.items() + def __len__(self): + """Return the number of entries in the dictionary.""" + return len(self._view) + def __contains__(self, key): + """ + Return True if and only if *key* is one of the underlying dictionary's + items. + """ + return key in self._view + def __iter__(self): + """ + Return an iterable over the items in the dictionary. Items are iterated + over in their sorted order. + + Iterating views while adding or deleting entries in the dictionary may + raise a `RuntimeError` or fail to iterate over all entries. 
+ """ + _dict = self._dict + return iter((key, _dict[key]) for key in self._list) + def __getitem__(self, index): + """Return the item as position *index*.""" + _dict, _list = self._dict, self._list + if isinstance(index, slice): + return [(key, _dict[key]) for key in _list[index]] + key = _list[index] + return (key, _dict[key]) + def __reversed__(self): + """ + Return a reversed iterable over the items in the dictionary. Items are + iterated over in their reverse sort order. + + Iterating views while adding or deleting entries in the dictionary may + raise a RuntimeError or fail to iterate over all entries. + """ + _dict = self._dict + return iter((key, _dict[key]) for key in reversed(self._list)) + def index(self, key, start=None, stop=None): + """ + Return the smallest *k* such that `itemssview[k] == key` and `start <= k + < end`. Raises `KeyError` if *key* is not present. *stop* defaults + to the end of the set. *start* defaults to the beginning. Negative + indexes are supported, as for slice indices. 
+ """ + # pylint: disable=arguments-differ + temp, value = key + pos = self._list.index(temp, start, stop) + if value == self._dict[temp]: + return pos + else: + raise ValueError('{0!r} is not in dict'.format(key)) + def count(self, item): + """Return the number of occurrences of *item* in the set.""" + # pylint: disable=arguments-differ + key, value = item + return 1 if key in self._dict and self._dict[key] == value else 0 + def __eq__(self, that): + """Test set-like equality with *that*.""" + return self._view == that + def __ne__(self, that): + """Test set-like inequality with *that*.""" + return self._view != that + def __lt__(self, that): + """Test whether self is a proper subset of *that*.""" + return self._view < that + def __gt__(self, that): + """Test whether self is a proper superset of *that*.""" + return self._view > that + def __le__(self, that): + """Test whether self is contained within *that*.""" + return self._view <= that + def __ge__(self, that): + """Test whether *that* is contained within self.""" + return self._view >= that + def __and__(self, that): + """Return a SortedSet of the intersection of self and *that*.""" + return SortedSet(self._view & that) + def __or__(self, that): + """Return a SortedSet of the union of self and *that*.""" + return SortedSet(self._view | that) + def __sub__(self, that): + """Return a SortedSet of the difference of self and *that*.""" + return SortedSet(self._view - that) + def __xor__(self, that): + """Return a SortedSet of the symmetric difference of self and *that*.""" + return SortedSet(self._view ^ that) + if hexversion < 0x03000000: + def isdisjoint(self, that): + """Return True if and only if *that* is disjoint with self.""" + # pylint: disable=arguments-differ + _dict = self._dict + for key, value in that: + if key in _dict and _dict[key] == value: + return False + return True + else: + def isdisjoint(self, that): + """Return True if and only if *that* is disjoint with self.""" + # pylint: 
disable=arguments-differ + return self._view.isdisjoint(that) + @recursive_repr + def __repr__(self): + return 'SortedDict_items({0!r})'.format(list(self)) diff --git a/src/rez/vendor/sortedcontainers/sortedlist.py b/src/rez/vendor/sortedcontainers/sortedlist.py new file mode 100644 index 000000000..ecbdadcf1 --- /dev/null +++ b/src/rez/vendor/sortedcontainers/sortedlist.py @@ -0,0 +1,2492 @@ +"""Sorted list implementation. + +""" +# pylint: disable=redefined-builtin, ungrouped-imports + +from __future__ import print_function + +from bisect import bisect_left, bisect_right, insort +from collections import Sequence, MutableSequence +from functools import wraps +from itertools import chain, repeat, starmap +from math import log as log_e +import operator as op +from operator import iadd, add +from sys import hexversion + +if hexversion < 0x03000000: + from itertools import izip as zip # pylint: disable=no-name-in-module + from itertools import imap as map # pylint: disable=no-name-in-module + try: + from thread import get_ident + except ImportError: + from dummy_thread import get_ident +else: + from functools import reduce + try: + from _thread import get_ident + except ImportError: + from _dummy_thread import get_ident # pylint: disable=import-error + +LOAD = 1000 + +def recursive_repr(func): + """Decorator to prevent infinite repr recursion.""" + repr_running = set() + + @wraps(func) + def wrapper(self): + "Return ellipsis on recursive re-entry to function." + key = id(self), get_ident() + + if key in repr_running: + return '...' + + repr_running.add(key) + + try: + return func(self) + finally: + repr_running.discard(key) + + return wrapper + +class SortedList(MutableSequence): + """ + SortedList provides most of the same methods as a list but keeps the items + in sorted order. + """ + # pylint: disable=too-many-ancestors + def __init__(self, iterable=None): + """ + SortedList provides most of the same methods as a list but keeps the + items in sorted order. 
+ + An optional *iterable* provides an initial series of items to populate + the SortedList. + """ + self._len = 0 + self._lists = [] + self._maxes = [] + self._index = [] + self._load = LOAD + self._half = LOAD >> 1 + self._dual = LOAD << 1 + self._offset = 0 + + if iterable is not None: + self._update(iterable) + + def __new__(cls, iterable=None, key=None): + """ + SortedList provides most of the same methods as a list but keeps the + items in sorted order. + + An optional *iterable* provides an initial series of items to populate + the SortedList. + + An optional *key* argument will return an instance of subtype + SortedListWithKey. + """ + # pylint: disable=unused-argument + if key is None: + return object.__new__(cls) + else: + if cls is SortedList: + return object.__new__(SortedListWithKey) + else: + raise TypeError('inherit SortedListWithKey for key argument') + + @property + def key(self): + """Key function used to extract comparison key for sorting.""" + return None + + def _reset(self, load): + """ + Reset sorted list load. + + The *load* specifies the load-factor of the list. The default load + factor of '1000' works well for lists from tens to tens of millions of + elements. Good practice is to use a value that is the cube root of the + list size. With billions of elements, the best load factor depends on + your usage. It's best to leave the load factor at the default until + you start benchmarking. 
+ """ + values = reduce(iadd, self._lists, []) + self._clear() + self._load = load + self._half = load >> 1 + self._dual = load << 1 + self._update(values) + + def clear(self): + """Remove all the elements from the list.""" + self._len = 0 + del self._lists[:] + del self._maxes[:] + del self._index[:] + + _clear = clear + + def add(self, val): + """Add the element *val* to the list.""" + _lists = self._lists + _maxes = self._maxes + + if _maxes: + pos = bisect_right(_maxes, val) + + if pos == len(_maxes): + pos -= 1 + _lists[pos].append(val) + _maxes[pos] = val + else: + insort(_lists[pos], val) + + self._expand(pos) + else: + _lists.append([val]) + _maxes.append(val) + + self._len += 1 + + def _expand(self, pos): + """Splits sublists that are more than double the load level. + + Updates the index when the sublist length is less than double the load + level. This requires incrementing the nodes in a traversal from the + leaf node to the root. For an example traversal see self._loc. + + """ + _lists = self._lists + _index = self._index + + if len(_lists[pos]) > self._dual: + _maxes = self._maxes + _load = self._load + + _lists_pos = _lists[pos] + half = _lists_pos[_load:] + del _lists_pos[_load:] + _maxes[pos] = _lists_pos[-1] + + _lists.insert(pos + 1, half) + _maxes.insert(pos + 1, half[-1]) + + del _index[:] + else: + if _index: + child = self._offset + pos + while child: + _index[child] += 1 + child = (child - 1) >> 1 + _index[0] += 1 + + def update(self, iterable): + """Update the list by adding all elements from *iterable*.""" + _lists = self._lists + _maxes = self._maxes + values = sorted(iterable) + + if _maxes: + if len(values) * 4 >= self._len: + values.extend(chain.from_iterable(_lists)) + values.sort() + self._clear() + else: + _add = self.add + for val in values: + _add(val) + return + + _load = self._load + _lists.extend(values[pos:(pos + _load)] + for pos in range(0, len(values), _load)) + _maxes.extend(sublist[-1] for sublist in _lists) + self._len = 
len(values) + del self._index[:] + + _update = update + + def __contains__(self, val): + """Return True if and only if *val* is an element in the list.""" + _maxes = self._maxes + + if not _maxes: + return False + + pos = bisect_left(_maxes, val) + + if pos == len(_maxes): + return False + + _lists = self._lists + idx = bisect_left(_lists[pos], val) + + return _lists[pos][idx] == val + + def discard(self, val): + """ + Remove the first occurrence of *val*. + + If *val* is not a member, does nothing. + """ + _maxes = self._maxes + + if not _maxes: + return + + pos = bisect_left(_maxes, val) + + if pos == len(_maxes): + return + + _lists = self._lists + idx = bisect_left(_lists[pos], val) + + if _lists[pos][idx] == val: + self._delete(pos, idx) + + def remove(self, val): + """ + Remove first occurrence of *val*. + + Raises ValueError if *val* is not present. + """ + # pylint: disable=arguments-differ + _maxes = self._maxes + + if not _maxes: + raise ValueError('{0!r} not in list'.format(val)) + + pos = bisect_left(_maxes, val) + + if pos == len(_maxes): + raise ValueError('{0!r} not in list'.format(val)) + + _lists = self._lists + idx = bisect_left(_lists[pos], val) + + if _lists[pos][idx] == val: + self._delete(pos, idx) + else: + raise ValueError('{0!r} not in list'.format(val)) + + def _delete(self, pos, idx): + """Delete the item at the given (pos, idx). + + Combines lists that are less than half the load level. + + Updates the index when the sublist length is more than half the load + level. This requires decrementing the nodes in a traversal from the leaf + node to the root. For an example traversal see self._loc. 
+ """ + _lists = self._lists + _maxes = self._maxes + _index = self._index + + _lists_pos = _lists[pos] + + del _lists_pos[idx] + self._len -= 1 + + len_lists_pos = len(_lists_pos) + + if len_lists_pos > self._half: + + _maxes[pos] = _lists_pos[-1] + + if _index: + child = self._offset + pos + while child > 0: + _index[child] -= 1 + child = (child - 1) >> 1 + _index[0] -= 1 + + elif len(_lists) > 1: + + if not pos: + pos += 1 + + prev = pos - 1 + _lists[prev].extend(_lists[pos]) + _maxes[prev] = _lists[prev][-1] + + del _lists[pos] + del _maxes[pos] + del _index[:] + + self._expand(prev) + + elif len_lists_pos: + + _maxes[pos] = _lists_pos[-1] + + else: + + del _lists[pos] + del _maxes[pos] + del _index[:] + + def _loc(self, pos, idx): + """Convert an index pair (alpha, beta) into a single index that corresponds to + the position of the value in the sorted list. + + Most queries require the index be built. Details of the index are + described in self._build_index. + + Indexing requires traversing the tree from a leaf node to the root. The + parent of each node is easily computable at (pos - 1) // 2. + + Left-child nodes are always at odd indices and right-child nodes are + always at even indices. + + When traversing up from a right-child node, increment the total by the + left-child node. + + The final index is the sum from traversal and the index in the sublist. + + For example, using the index from self._build_index: + + _index = 14 5 9 3 2 4 5 + _offset = 3 + + Tree: + + 14 + 5 9 + 3 2 4 5 + + Converting index pair (2, 3) into a single index involves iterating like + so: + + 1. Starting at the leaf node: offset + alpha = 3 + 2 = 5. We identify + the node as a left-child node. At such nodes, we simply traverse to + the parent. + + 2. At node 9, position 2, we recognize the node as a right-child node + and accumulate the left-child in our total. Total is now 5 and we + traverse to the parent at position 0. + + 3. Iteration ends at the root. 
+ + Computing the index is the sum of the total and beta: 5 + 3 = 8. + """ + if not pos: + return idx + + _index = self._index + + if not _index: + self._build_index() + + total = 0 + + # Increment pos to point in the index to len(self._lists[pos]). + + pos += self._offset + + # Iterate until reaching the root of the index tree at pos = 0. + + while pos: + + # Right-child nodes are at odd indices. At such indices + # account the total below the left child node. + + if not pos & 1: + total += _index[pos - 1] + + # Advance pos to the parent node. + + pos = (pos - 1) >> 1 + + return total + idx + + def _pos(self, idx): + """Convert an index into a pair (alpha, beta) that can be used to access + the corresponding _lists[alpha][beta] position. + + Most queries require the index be built. Details of the index are + described in self._build_index. + + Indexing requires traversing the tree to a leaf node. Each node has + two children which are easily computable. Given an index, pos, the + left-child is at pos * 2 + 1 and the right-child is at pos * 2 + 2. + + When the index is less than the left-child, traversal moves to the + left sub-tree. Otherwise, the index is decremented by the left-child + and traversal moves to the right sub-tree. + + At a child node, the indexing pair is computed from the relative + position of the child node as compared with the offset and the remaining + index. + + For example, using the index from self._build_index: + + _index = 14 5 9 3 2 4 5 + _offset = 3 + + Tree: + + 14 + 5 9 + 3 2 4 5 + + Indexing position 8 involves iterating like so: + + 1. Starting at the root, position 0, 8 is compared with the left-child + node (5) which it is greater than. When greater the index is + decremented and the position is updated to the right child node. + + 2. At node 9 with index 3, we again compare the index to the left-child + node with value 4. Because the index is the less than the left-child + node, we simply traverse to the left. + + 3. 
At node 4 with index 3, we recognize that we are at a leaf node and + stop iterating. + + 4. To compute the sublist index, we subtract the offset from the index + of the leaf node: 5 - 3 = 2. To compute the index in the sublist, we + simply use the index remaining from iteration. In this case, 3. + + The final index pair from our example is (2, 3) which corresponds to + index 8 in the sorted list. + """ + if idx < 0: + last_len = len(self._lists[-1]) + + if (-idx) <= last_len: + return len(self._lists) - 1, last_len + idx + + idx += self._len + + if idx < 0: + raise IndexError('list index out of range') + elif idx >= self._len: + raise IndexError('list index out of range') + + if idx < len(self._lists[0]): + return 0, idx + + _index = self._index + + if not _index: + self._build_index() + + pos = 0 + child = 1 + len_index = len(_index) + + while child < len_index: + index_child = _index[child] + + if idx < index_child: + pos = child + else: + idx -= index_child + pos = child + 1 + + child = (pos << 1) + 1 + + return (pos - self._offset, idx) + + def _build_index(self): + """Build an index for indexing the sorted list. + + Indexes are represented as binary trees in a dense array notation + similar to a binary heap. + + For example, given a _lists representation storing integers: + + [0]: 1 2 3 + [1]: 4 5 + [2]: 6 7 8 9 + [3]: 10 11 12 13 14 + + The first transformation maps the sub-lists by their length. The + first row of the index is the length of the sub-lists. + + [0]: 3 2 4 5 + + Each row after that is the sum of consecutive pairs of the previous row: + + [1]: 5 9 + [2]: 14 + + Finally, the index is built by concatenating these lists together: + + _index = 14 5 9 3 2 4 5 + + An offset storing the start of the first row is also stored: + + _offset = 3 + + When built, the index can be used for efficient indexing into the list. + See the comment and notes on self._pos for details. 
+ """ + row0 = list(map(len, self._lists)) + + if len(row0) == 1: + self._index[:] = row0 + self._offset = 0 + return + + head = iter(row0) + tail = iter(head) + row1 = list(starmap(add, zip(head, tail))) + + if len(row0) & 1: + row1.append(row0[-1]) + + if len(row1) == 1: + self._index[:] = row1 + row0 + self._offset = 1 + return + + size = 2 ** (int(log_e(len(row1) - 1, 2)) + 1) + row1.extend(repeat(0, size - len(row1))) + tree = [row0, row1] + + while len(tree[-1]) > 1: + head = iter(tree[-1]) + tail = iter(head) + row = list(starmap(add, zip(head, tail))) + tree.append(row) + + reduce(iadd, reversed(tree), self._index) + self._offset = size * 2 - 1 + + def __delitem__(self, idx): + """Remove the element at *idx*. Supports slicing.""" + if isinstance(idx, slice): + start, stop, step = idx.indices(self._len) + + if step == 1 and start < stop: + if start == 0 and stop == self._len: + return self._clear() + elif self._len <= 8 * (stop - start): + values = self._getitem(slice(None, start)) + if stop < self._len: + values += self._getitem(slice(stop, None)) + self._clear() + return self._update(values) + + indices = range(start, stop, step) + + # Delete items from greatest index to least so + # that the indices remain valid throughout iteration. + + if step > 0: + indices = reversed(indices) + + _pos, _delete = self._pos, self._delete + + for index in indices: + pos, idx = _pos(index) + _delete(pos, idx) + else: + pos, idx = self._pos(idx) + self._delete(pos, idx) + + _delitem = __delitem__ + + def __getitem__(self, idx): + """Return the element at *idx*. 
Supports slicing.""" + _lists = self._lists + + if isinstance(idx, slice): + start, stop, step = idx.indices(self._len) + + if step == 1 and start < stop: + if start == 0 and stop == self._len: + return reduce(iadd, self._lists, []) + + start_pos, start_idx = self._pos(start) + + if stop == self._len: + stop_pos = len(_lists) - 1 + stop_idx = len(_lists[stop_pos]) + else: + stop_pos, stop_idx = self._pos(stop) + + if start_pos == stop_pos: + return _lists[start_pos][start_idx:stop_idx] + + prefix = _lists[start_pos][start_idx:] + middle = _lists[(start_pos + 1):stop_pos] + result = reduce(iadd, middle, prefix) + result += _lists[stop_pos][:stop_idx] + + return result + + if step == -1 and start > stop: + result = self._getitem(slice(stop + 1, start + 1)) + result.reverse() + return result + + # Return a list because a negative step could + # reverse the order of the items and this could + # be the desired behavior. + + indices = range(start, stop, step) + return list(self._getitem(index) for index in indices) + else: + if self._len: + if idx == 0: + return _lists[0][0] + elif idx == -1: + return _lists[-1][-1] + else: + raise IndexError('list index out of range') + + if 0 <= idx < len(_lists[0]): + return _lists[0][idx] + + len_last = len(_lists[-1]) + + if -len_last < idx < 0: + return _lists[-1][len_last + idx] + + pos, idx = self._pos(idx) + return _lists[pos][idx] + + _getitem = __getitem__ + + def _check_order(self, idx, val): + _len = self._len + _lists = self._lists + + pos, loc = self._pos(idx) + + if idx < 0: + idx += _len + + # Check that the inserted value is not less than the + # previous value. + + if idx > 0: + idx_prev = loc - 1 + pos_prev = pos + + if idx_prev < 0: + pos_prev -= 1 + idx_prev = len(_lists[pos_prev]) - 1 + + if _lists[pos_prev][idx_prev] > val: + msg = '{0!r} not in sort order at index {1}'.format(val, idx) + raise ValueError(msg) + + # Check that the inserted value is not greater than + # the previous value. 
+ + if idx < (_len - 1): + idx_next = loc + 1 + pos_next = pos + + if idx_next == len(_lists[pos_next]): + pos_next += 1 + idx_next = 0 + + if _lists[pos_next][idx_next] < val: + msg = '{0!r} not in sort order at index {1}'.format(val, idx) + raise ValueError(msg) + + def __setitem__(self, index, value): + """Replace item at position *index* with *value*. + + Supports slice notation. Raises :exc:`ValueError` if the sort order + would be violated. When used with a slice and iterable, the + :exc:`ValueError` is raised before the list is mutated if the sort + order would be violated by the operation. + + """ + _lists = self._lists + _maxes = self._maxes + _check_order = self._check_order + _pos = self._pos + + if isinstance(index, slice): + _len = self._len + start, stop, step = index.indices(_len) + indices = range(start, stop, step) + + # Copy value to avoid aliasing issues with self and cases where an + # iterator is given. + + values = tuple(value) + + if step != 1: + if len(values) != len(indices): + raise ValueError( + 'attempt to assign sequence of size %s' + ' to extended slice of size %s' + % (len(values), len(indices))) + + # Keep a log of values that are set so that we can + # roll back changes if ordering is violated. + + log = [] + _append = log.append + + for idx, val in zip(indices, values): + pos, loc = _pos(idx) + _append((idx, _lists[pos][loc], val)) + _lists[pos][loc] = val + if len(_lists[pos]) == (loc + 1): + _maxes[pos] = val + + try: + # Validate ordering of new values. + + for idx, _, newval in log: + _check_order(idx, newval) + + except ValueError: + + # Roll back changes from log. + + for idx, oldval, _ in log: + pos, loc = _pos(idx) + _lists[pos][loc] = oldval + if len(_lists[pos]) == (loc + 1): + _maxes[pos] = oldval + + raise + else: + if start == 0 and stop == _len: + self._clear() + return self._update(values) + + if stop < start: + # When calculating indices, stop may be less than start. 
+ # For example: ...[5:3:1] results in slice(5, 3, 1) which + # is a valid but not useful stop index. + stop = start + + if values: + + # Check that given values are ordered properly. + + alphas = iter(values) + betas = iter(values) + next(betas) + pairs = zip(alphas, betas) + + if not all(alpha <= beta for alpha, beta in pairs): + raise ValueError('given values not in sort order') + + # Check ordering in context of sorted list. + + if start and self._getitem(start - 1) > values[0]: + message = '{0!r} not in sort order at index {1}'.format( + values[0], start) + raise ValueError(message) + + if stop != _len and self._getitem(stop) < values[-1]: + message = '{0!r} not in sort order at index {1}'.format( + values[-1], stop) + raise ValueError(message) + + # Delete the existing values. + + self._delitem(index) + + # Insert the new values. + + _insert = self.insert + for idx, val in enumerate(values): + _insert(start + idx, val) + else: + pos, loc = _pos(index) + _check_order(index, value) + _lists[pos][loc] = value + if len(_lists[pos]) == (loc + 1): + _maxes[pos] = value + + def __iter__(self): + """ + Return an iterator over the Sequence. + + Iterating the Sequence while adding or deleting values may raise a + `RuntimeError` or fail to iterate over all entries. + """ + return chain.from_iterable(self._lists) + + def __reversed__(self): + """ + Return an iterator to traverse the Sequence in reverse. + + Iterating the Sequence while adding or deleting values may raise a + `RuntimeError` or fail to iterate over all entries. + """ + return chain.from_iterable(map(reversed, reversed(self._lists))) + + def islice(self, start=None, stop=None, reverse=False): + """ + Returns an iterator that slices `self` from `start` to `stop` index, + inclusive and exclusive respectively. + + When `reverse` is `True`, values are yielded from the iterator in + reverse order. + + Both `start` and `stop` default to `None` which is automatically + inclusive of the beginning and end. 
+ """ + _len = self._len + + if not _len: + return iter(()) + + start, stop, _ = slice(start, stop).indices(self._len) + + if start >= stop: + return iter(()) + + _pos = self._pos + + min_pos, min_idx = _pos(start) + + if stop == _len: + max_pos = len(self._lists) - 1 + max_idx = len(self._lists[-1]) + else: + max_pos, max_idx = _pos(stop) + + return self._islice(min_pos, min_idx, max_pos, max_idx, reverse) + + def _islice(self, min_pos, min_idx, max_pos, max_idx, reverse): + """ + Returns an iterator that slices `self` using two index pairs, + `(min_pos, min_idx)` and `(max_pos, max_idx)`; the first inclusive + and the latter exclusive. See `_pos` for details on how an index + is converted to an index pair. + + When `reverse` is `True`, values are yielded from the iterator in + reverse order. + """ + _lists = self._lists + + if min_pos > max_pos: + return iter(()) + elif min_pos == max_pos and not reverse: + return iter(_lists[min_pos][min_idx:max_idx]) + elif min_pos == max_pos and reverse: + return reversed(_lists[min_pos][min_idx:max_idx]) + elif min_pos + 1 == max_pos and not reverse: + return chain(_lists[min_pos][min_idx:], _lists[max_pos][:max_idx]) + elif min_pos + 1 == max_pos and reverse: + return chain( + reversed(_lists[max_pos][:max_idx]), + reversed(_lists[min_pos][min_idx:]), + ) + elif not reverse: + return chain( + _lists[min_pos][min_idx:], + chain.from_iterable(_lists[(min_pos + 1):max_pos]), + _lists[max_pos][:max_idx], + ) + + temp = map(reversed, reversed(_lists[(min_pos + 1):max_pos])) + return chain( + reversed(_lists[max_pos][:max_idx]), + chain.from_iterable(temp), + reversed(_lists[min_pos][min_idx:]), + ) + + def irange(self, minimum=None, maximum=None, inclusive=(True, True), + reverse=False): + """ + Create an iterator of values between `minimum` and `maximum`. + + `inclusive` is a pair of booleans that indicates whether the minimum + and maximum ought to be included in the range, respectively. 
The + default is (True, True) such that the range is inclusive of both + minimum and maximum. + + Both `minimum` and `maximum` default to `None` which is automatically + inclusive of the start and end of the list, respectively. + + When `reverse` is `True` the values are yielded from the iterator in + reverse order; `reverse` defaults to `False`. + """ + _maxes = self._maxes + + if not _maxes: + return iter(()) + + _lists = self._lists + + # Calculate the minimum (pos, idx) pair. By default this location + # will be inclusive in our calculation. + + if minimum is None: + min_pos = 0 + min_idx = 0 + else: + if inclusive[0]: + min_pos = bisect_left(_maxes, minimum) + + if min_pos == len(_maxes): + return iter(()) + + min_idx = bisect_left(_lists[min_pos], minimum) + else: + min_pos = bisect_right(_maxes, minimum) + + if min_pos == len(_maxes): + return iter(()) + + min_idx = bisect_right(_lists[min_pos], minimum) + + # Calculate the maximum (pos, idx) pair. By default this location + # will be exclusive in our calculation. + + if maximum is None: + max_pos = len(_maxes) - 1 + max_idx = len(_lists[max_pos]) + else: + if inclusive[1]: + max_pos = bisect_right(_maxes, maximum) + + if max_pos == len(_maxes): + max_pos -= 1 + max_idx = len(_lists[max_pos]) + else: + max_idx = bisect_right(_lists[max_pos], maximum) + else: + max_pos = bisect_left(_maxes, maximum) + + if max_pos == len(_maxes): + max_pos -= 1 + max_idx = len(_lists[max_pos]) + else: + max_idx = bisect_left(_lists[max_pos], maximum) + + return self._islice(min_pos, min_idx, max_pos, max_idx, reverse) + + def __len__(self): + """Return the number of elements in the list.""" + return self._len + + def bisect_left(self, val): + """ + Similar to the *bisect* module in the standard library, this returns an + appropriate index to insert *val*. If *val* is already present, the + insertion point will be before (to the left of) any existing entries. 
+ """ + _maxes = self._maxes + + if not _maxes: + return 0 + + pos = bisect_left(_maxes, val) + + if pos == len(_maxes): + return self._len + + idx = bisect_left(self._lists[pos], val) + + return self._loc(pos, idx) + + def bisect_right(self, val): + """ + Same as *bisect_left*, but if *val* is already present, the insertion + point will be after (to the right of) any existing entries. + """ + _maxes = self._maxes + + if not _maxes: + return 0 + + pos = bisect_right(_maxes, val) + + if pos == len(_maxes): + return self._len + + idx = bisect_right(self._lists[pos], val) + + return self._loc(pos, idx) + + bisect = bisect_right + _bisect_right = bisect_right + + def count(self, val): + """Return the number of occurrences of *val* in the list.""" + # pylint: disable=arguments-differ + _maxes = self._maxes + + if not _maxes: + return 0 + + pos_left = bisect_left(_maxes, val) + + if pos_left == len(_maxes): + return 0 + + _lists = self._lists + idx_left = bisect_left(_lists[pos_left], val) + pos_right = bisect_right(_maxes, val) + + if pos_right == len(_maxes): + return self._len - self._loc(pos_left, idx_left) + + idx_right = bisect_right(_lists[pos_right], val) + + if pos_left == pos_right: + return idx_right - idx_left + + right = self._loc(pos_right, idx_right) + left = self._loc(pos_left, idx_left) + + return right - left + + def copy(self): + """Return a shallow copy of the sorted list.""" + return self.__class__(self) + + __copy__ = copy + + def append(self, val): + """ + Append the element *val* to the list. Raises a ValueError if the *val* + would violate the sort order. 
+ """ + # pylint: disable=arguments-differ + _lists = self._lists + _maxes = self._maxes + + if not _maxes: + _maxes.append(val) + _lists.append([val]) + self._len = 1 + return + + pos = len(_lists) - 1 + + if val < _lists[pos][-1]: + msg = '{0!r} not in sort order at index {1}'.format(val, self._len) + raise ValueError(msg) + + _maxes[pos] = val + _lists[pos].append(val) + self._len += 1 + self._expand(pos) + + def extend(self, values): + """ + Extend the list by appending all elements from the *values*. Raises a + ValueError if the sort order would be violated. + """ + _lists = self._lists + _maxes = self._maxes + _load = self._load + + if not isinstance(values, list): + values = list(values) + + if not values: + return + + if any(values[pos - 1] > values[pos] + for pos in range(1, len(values))): + raise ValueError('given sequence not in sort order') + + offset = 0 + + if _maxes: + if values[0] < _lists[-1][-1]: + msg = '{0!r} not in sort order at index {1}'.format(values[0], self._len) + raise ValueError(msg) + + if len(_lists[-1]) < self._half: + _lists[-1].extend(values[:_load]) + _maxes[-1] = _lists[-1][-1] + offset = _load + + len_lists = len(_lists) + + for idx in range(offset, len(values), _load): + _lists.append(values[idx:(idx + _load)]) + _maxes.append(_lists[-1][-1]) + + _index = self._index + + if len_lists == len(_lists): + len_index = len(_index) + if len_index > 0: + len_values = len(values) + child = len_index - 1 + while child: + _index[child] += len_values + child = (child - 1) >> 1 + _index[0] += len_values + else: + del _index[:] + + self._len += len(values) + + def insert(self, idx, val): + """ + Insert the element *val* into the list at *idx*. Raises a ValueError if + the *val* at *idx* would violate the sort order. 
+ """ + # pylint: disable=arguments-differ + _len = self._len + _lists = self._lists + _maxes = self._maxes + + if idx < 0: + idx += _len + if idx < 0: + idx = 0 + if idx > _len: + idx = _len + + if not _maxes: + # The idx must be zero by the inequalities above. + _maxes.append(val) + _lists.append([val]) + self._len = 1 + return + + if not idx: + if val > _lists[0][0]: + msg = '{0!r} not in sort order at index {1}'.format(val, 0) + raise ValueError(msg) + else: + _lists[0].insert(0, val) + self._expand(0) + self._len += 1 + return + + if idx == _len: + pos = len(_lists) - 1 + if _lists[pos][-1] > val: + msg = '{0!r} not in sort order at index {1}'.format(val, _len) + raise ValueError(msg) + else: + _lists[pos].append(val) + _maxes[pos] = _lists[pos][-1] + self._expand(pos) + self._len += 1 + return + + pos, idx = self._pos(idx) + idx_before = idx - 1 + if idx_before < 0: + pos_before = pos - 1 + idx_before = len(_lists[pos_before]) - 1 + else: + pos_before = pos + + before = _lists[pos_before][idx_before] + if before <= val <= _lists[pos][idx]: + _lists[pos].insert(idx, val) + self._expand(pos) + self._len += 1 + else: + msg = '{0!r} not in sort order at index {1}'.format(val, idx) + raise ValueError(msg) + + def pop(self, idx=-1): + """ + Remove and return item at *idx* (default last). Raises IndexError if + list is empty or index is out of range. Negative indices are supported, + as for slice indices. 
+ """ + # pylint: disable=arguments-differ + if not self._len: + raise IndexError('pop index out of range') + + _lists = self._lists + + if idx == 0: + val = _lists[0][0] + self._delete(0, 0) + return val + + if idx == -1: + pos = len(_lists) - 1 + loc = len(_lists[pos]) - 1 + val = _lists[pos][loc] + self._delete(pos, loc) + return val + + if 0 <= idx < len(_lists[0]): + val = _lists[0][idx] + self._delete(0, idx) + return val + + len_last = len(_lists[-1]) + + if -len_last < idx < 0: + pos = len(_lists) - 1 + loc = len_last + idx + val = _lists[pos][loc] + self._delete(pos, loc) + return val + + pos, idx = self._pos(idx) + val = _lists[pos][idx] + self._delete(pos, idx) + + return val + + def index(self, val, start=None, stop=None): + """ + Return the smallest *k* such that L[k] == val and i <= k < j`. Raises + ValueError if *val* is not present. *stop* defaults to the end of the + list. *start* defaults to the beginning. Negative indices are supported, + as for slice indices. + """ + # pylint: disable=arguments-differ + _len = self._len + + if not _len: + raise ValueError('{0!r} is not in list'.format(val)) + + if start is None: + start = 0 + if start < 0: + start += _len + if start < 0: + start = 0 + + if stop is None: + stop = _len + if stop < 0: + stop += _len + if stop > _len: + stop = _len + + if stop <= start: + raise ValueError('{0!r} is not in list'.format(val)) + + _maxes = self._maxes + pos_left = bisect_left(_maxes, val) + + if pos_left == len(_maxes): + raise ValueError('{0!r} is not in list'.format(val)) + + _lists = self._lists + idx_left = bisect_left(_lists[pos_left], val) + + if _lists[pos_left][idx_left] != val: + raise ValueError('{0!r} is not in list'.format(val)) + + stop -= 1 + left = self._loc(pos_left, idx_left) + + if start <= left: + if left <= stop: + return left + else: + right = self._bisect_right(val) - 1 + + if start <= right: + return start + + raise ValueError('{0!r} is not in list'.format(val)) + + def __add__(self, that): + """ 
+ Return a new sorted list containing all the elements in *self* and + *that*. Elements in *that* do not need to be properly ordered with + respect to *self*. + """ + values = reduce(iadd, self._lists, []) + values.extend(that) + return self.__class__(values) + + def __iadd__(self, that): + """ + Update *self* to include all values in *that*. Elements in *that* do not + need to be properly ordered with respect to *self*. + """ + self._update(that) + return self + + def __mul__(self, that): + """ + Return a new sorted list containing *that* shallow copies of each item + in SortedList. + """ + values = reduce(iadd, self._lists, []) * that + return self.__class__(values) + + def __imul__(self, that): + """ + Increase the length of the list by appending *that* shallow copies of + each item. + """ + values = reduce(iadd, self._lists, []) * that + self._clear() + self._update(values) + return self + + def _make_cmp(self, seq_op, doc): + "Make comparator method." + def comparer(self, that): + "Compare method for sorted list and sequence." + # pylint: disable=protected-access + if not isinstance(that, Sequence): + return NotImplemented + + self_len = self._len + len_that = len(that) + + if self_len != len_that: + if seq_op is op.eq: + return False + if seq_op is op.ne: + return True + + for alpha, beta in zip(self, that): + if alpha != beta: + return seq_op(alpha, beta) + + return seq_op(self_len, len_that) + + comparer.__name__ = '__{0}__'.format(seq_op.__name__) + doc_str = 'Return `True` if and only if Sequence is {0} `that`.' 
+ comparer.__doc__ = doc_str.format(doc) + + return comparer + + __eq__ = _make_cmp(None, op.eq, 'equal to') + __ne__ = _make_cmp(None, op.ne, 'not equal to') + __lt__ = _make_cmp(None, op.lt, 'less than') + __gt__ = _make_cmp(None, op.gt, 'greater than') + __le__ = _make_cmp(None, op.le, 'less than or equal to') + __ge__ = _make_cmp(None, op.ge, 'greater than or equal to') + + @recursive_repr + def __repr__(self): + """Return string representation of sequence.""" + return '{0}({1!r})'.format(type(self).__name__, list(self)) + + def _check(self): + try: + # Check load parameters. + + assert self._load >= 4 + assert self._half == (self._load >> 1) + assert self._dual == (self._load << 1) + + # Check empty sorted list case. + + if self._maxes == []: + assert self._lists == [] + return + + assert self._maxes and self._lists + + # Check all sublists are sorted. + + assert all(sublist[pos - 1] <= sublist[pos] + for sublist in self._lists + for pos in range(1, len(sublist))) + + # Check beginning/end of sublists are sorted. + + for pos in range(1, len(self._lists)): + assert self._lists[pos - 1][-1] <= self._lists[pos][0] + + # Check length of _maxes and _lists match. + + assert len(self._maxes) == len(self._lists) + + # Check _maxes is a map of _lists. + + assert all(self._maxes[pos] == self._lists[pos][-1] + for pos in range(len(self._maxes))) + + # Check load level is less than _dual. + + assert all(len(sublist) <= self._dual for sublist in self._lists) + + # Check load level is greater than _half for all + # but the last sublist. + + assert all(len(self._lists[pos]) >= self._half + for pos in range(0, len(self._lists) - 1)) + + # Check length. + + assert self._len == sum(len(sublist) for sublist in self._lists) + + # Check index. + + if self._index: + assert len(self._index) == self._offset + len(self._lists) + assert self._len == self._index[0] + + def test_offset_pos(pos): + "Test positional indexing offset." 
+ from_index = self._index[self._offset + pos] + return from_index == len(self._lists[pos]) + + assert all(test_offset_pos(pos) + for pos in range(len(self._lists))) + + for pos in range(self._offset): + child = (pos << 1) + 1 + if child >= len(self._index): + assert self._index[pos] == 0 + elif child + 1 == len(self._index): + assert self._index[pos] == self._index[child] + else: + child_sum = self._index[child] + self._index[child + 1] + assert self._index[pos] == child_sum + + except: + import sys + import traceback + + traceback.print_exc(file=sys.stdout) + + print('len', self._len) + print('load', self._load, self._half, self._dual) + print('offset', self._offset) + print('len_index', len(self._index)) + print('index', self._index) + print('len_maxes', len(self._maxes)) + print('maxes', self._maxes) + print('len_lists', len(self._lists)) + print('lists', self._lists) + + raise + +def identity(value): + "Identity function." + return value + +class SortedListWithKey(SortedList): + """ + SortedListWithKey provides most of the same methods as a list but keeps + the items in sorted order. + """ + # pylint: disable=too-many-ancestors + def __init__(self, iterable=None, key=identity): + """SortedListWithKey provides most of the same methods as list but keeps the + items in sorted order. + + An optional *iterable* provides an initial series of items to populate + the SortedListWithKey. + + An optional *key* argument defines a callable that, like the `key` + argument to Python's `sorted` function, extracts a comparison key from + each element. The default is the identity function. 
+ """ + # pylint: disable=super-init-not-called + self._len = 0 + self._lists = [] + self._keys = [] + self._maxes = [] + self._index = [] + self._key = key + self._load = LOAD + self._half = LOAD >> 1 + self._dual = LOAD << 1 + self._offset = 0 + + if iterable is not None: + self._update(iterable) + + def __new__(cls, iterable=None, key=identity): + return object.__new__(cls) + + @property + def key(self): + """Key function used to extract comparison key for sorting.""" + return self._key + + def clear(self): + """Remove all the elements from the list.""" + self._len = 0 + del self._lists[:] + del self._keys[:] + del self._maxes[:] + del self._index[:] + + _clear = clear + + def add(self, val): + """Add the element *val* to the list.""" + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + + key = self._key(val) + + if _maxes: + pos = bisect_right(_maxes, key) + + if pos == len(_maxes): + pos -= 1 + _lists[pos].append(val) + _keys[pos].append(key) + _maxes[pos] = key + else: + idx = bisect_right(_keys[pos], key) + _lists[pos].insert(idx, val) + _keys[pos].insert(idx, key) + + self._expand(pos) + else: + _lists.append([val]) + _keys.append([key]) + _maxes.append(key) + + self._len += 1 + + def _expand(self, pos): + """Splits sublists that are more than double the load level. + + Updates the index when the sublist length is less than double the load + level. This requires incrementing the nodes in a traversal from the + leaf node to the root. For an example traversal see self._loc. 
+ + """ + _lists = self._lists + _keys = self._keys + _index = self._index + + if len(_keys[pos]) > self._dual: + _maxes = self._maxes + _load = self._load + + _lists_pos = _lists[pos] + _keys_pos = _keys[pos] + half = _lists_pos[_load:] + half_keys = _keys_pos[_load:] + del _lists_pos[_load:] + del _keys_pos[_load:] + _maxes[pos] = _keys_pos[-1] + + _lists.insert(pos + 1, half) + _keys.insert(pos + 1, half_keys) + _maxes.insert(pos + 1, half_keys[-1]) + + del _index[:] + else: + if _index: + child = self._offset + pos + while child: + _index[child] += 1 + child = (child - 1) >> 1 + _index[0] += 1 + + def update(self, iterable): + """Update the list by adding all elements from *iterable*.""" + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + values = sorted(iterable, key=self._key) + + if _maxes: + if len(values) * 4 >= self._len: + values.extend(chain.from_iterable(_lists)) + values.sort(key=self._key) + self._clear() + else: + _add = self.add + for val in values: + _add(val) + return + + _load = self._load + _lists.extend(values[pos:(pos + _load)] + for pos in range(0, len(values), _load)) + _keys.extend(list(map(self._key, _list)) for _list in _lists) + _maxes.extend(sublist[-1] for sublist in _keys) + self._len = len(values) + del self._index[:] + + _update = update + + def __contains__(self, val): + """Return True if and only if *val* is an element in the list.""" + _maxes = self._maxes + + if not _maxes: + return False + + key = self._key(val) + pos = bisect_left(_maxes, key) + + if pos == len(_maxes): + return False + + _lists = self._lists + _keys = self._keys + + idx = bisect_left(_keys[pos], key) + + len_keys = len(_keys) + len_sublist = len(_keys[pos]) + + while True: + if _keys[pos][idx] != key: + return False + if _lists[pos][idx] == val: + return True + idx += 1 + if idx == len_sublist: + pos += 1 + if pos == len_keys: + return False + len_sublist = len(_keys[pos]) + idx = 0 + + def discard(self, val): + """ + Remove the first 
occurrence of *val*. + + If *val* is not a member, does nothing. + """ + _maxes = self._maxes + + if not _maxes: + return + + key = self._key(val) + pos = bisect_left(_maxes, key) + + if pos == len(_maxes): + return + + _lists = self._lists + _keys = self._keys + idx = bisect_left(_keys[pos], key) + len_keys = len(_keys) + len_sublist = len(_keys[pos]) + + while True: + if _keys[pos][idx] != key: + return + if _lists[pos][idx] == val: + self._delete(pos, idx) + return + idx += 1 + if idx == len_sublist: + pos += 1 + if pos == len_keys: + return + len_sublist = len(_keys[pos]) + idx = 0 + + def remove(self, val): + """ + Remove first occurrence of *val*. + + Raises ValueError if *val* is not present. + """ + _maxes = self._maxes + + if not _maxes: + raise ValueError('{0!r} not in list'.format(val)) + + key = self._key(val) + pos = bisect_left(_maxes, key) + + if pos == len(_maxes): + raise ValueError('{0!r} not in list'.format(val)) + + _lists = self._lists + _keys = self._keys + idx = bisect_left(_keys[pos], key) + len_keys = len(_keys) + len_sublist = len(_keys[pos]) + + while True: + if _keys[pos][idx] != key: + raise ValueError('{0!r} not in list'.format(val)) + if _lists[pos][idx] == val: + self._delete(pos, idx) + return + idx += 1 + if idx == len_sublist: + pos += 1 + if pos == len_keys: + raise ValueError('{0!r} not in list'.format(val)) + len_sublist = len(_keys[pos]) + idx = 0 + + def _delete(self, pos, idx): + """ + Delete the item at the given (pos, idx). + + Combines lists that are less than half the load level. + + Updates the index when the sublist length is more than half the load + level. This requires decrementing the nodes in a traversal from the leaf + node to the root. For an example traversal see self._loc. 
+ """ + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + _index = self._index + keys_pos = _keys[pos] + lists_pos = _lists[pos] + + del keys_pos[idx] + del lists_pos[idx] + self._len -= 1 + + len_keys_pos = len(keys_pos) + + if len_keys_pos > self._half: + + _maxes[pos] = keys_pos[-1] + + if _index: + child = self._offset + pos + while child > 0: + _index[child] -= 1 + child = (child - 1) >> 1 + _index[0] -= 1 + + elif len(_keys) > 1: + + if not pos: + pos += 1 + + prev = pos - 1 + _keys[prev].extend(_keys[pos]) + _lists[prev].extend(_lists[pos]) + _maxes[prev] = _keys[prev][-1] + + del _lists[pos] + del _keys[pos] + del _maxes[pos] + del _index[:] + + self._expand(prev) + + elif len_keys_pos: + + _maxes[pos] = keys_pos[-1] + + else: + + del _lists[pos] + del _keys[pos] + del _maxes[pos] + del _index[:] + + def _check_order(self, idx, key, val): + # pylint: disable=arguments-differ + _len = self._len + _keys = self._keys + + pos, loc = self._pos(idx) + + if idx < 0: + idx += _len + + # Check that the inserted value is not less than the + # previous value. + + if idx > 0: + idx_prev = loc - 1 + pos_prev = pos + + if idx_prev < 0: + pos_prev -= 1 + idx_prev = len(_keys[pos_prev]) - 1 + + if _keys[pos_prev][idx_prev] > key: + msg = '{0!r} not in sort order at index {1}'.format(val, idx) + raise ValueError(msg) + + # Check that the inserted value is not greater than + # the previous value. + + if idx < (_len - 1): + idx_next = loc + 1 + pos_next = pos + + if idx_next == len(_keys[pos_next]): + pos_next += 1 + idx_next = 0 + + if _keys[pos_next][idx_next] < key: + msg = '{0!r} not in sort order at index {1}'.format(val, idx) + raise ValueError(msg) + + def __setitem__(self, index, value): + """Replace the item at position *index* with *value*. + + Supports slice notation. Raises a :exc:`ValueError` if the sort order + would be violated. 
When used with a slice and iterable, the + :exc:`ValueError` is raised before the list is mutated if the sort + order would be violated by the operation. + + """ + # pylint: disable=too-many-locals + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + _check_order = self._check_order + _pos = self._pos + + if isinstance(index, slice): + _len = self._len + start, stop, step = index.indices(_len) + indices = range(start, stop, step) + + # Copy value to avoid aliasing issues with self and cases where an + # iterator is given. + + values = tuple(value) + + if step != 1: + if len(values) != len(indices): + raise ValueError( + 'attempt to assign sequence of size %s' + ' to extended slice of size %s' + % (len(values), len(indices))) + + # Keep a log of values that are set so that we can + # roll back changes if ordering is violated. + + log = [] + _append = log.append + + for idx, val in zip(indices, values): + pos, loc = _pos(idx) + key = self._key(val) + _append((idx, _keys[pos][loc], key, _lists[pos][loc], val)) + _keys[pos][loc] = key + _lists[pos][loc] = val + if len(_keys[pos]) == (loc + 1): + _maxes[pos] = key + + try: + # Validate ordering of new values. + + for idx, oldkey, newkey, oldval, newval in log: + _check_order(idx, newkey, newval) + + except ValueError: + + # Roll back changes from log. + + for idx, oldkey, newkey, oldval, newval in log: + pos, loc = _pos(idx) + _keys[pos][loc] = oldkey + _lists[pos][loc] = oldval + if len(_keys[pos]) == (loc + 1): + _maxes[pos] = oldkey + + raise + else: + if start == 0 and stop == self._len: + self._clear() + return self._update(values) + + if stop < start: + # When calculating indices, stop may be less than start. + # For example: ...[5:3:1] results in slice(5, 3, 1) which + # is a valid but not useful stop index. + stop = start + + if values: + + # Check that given values are ordered properly. 
+ + keys = tuple(map(self._key, values)) + alphas = iter(keys) + betas = iter(keys) + next(betas) + pairs = zip(alphas, betas) + + if not all(alpha <= beta for alpha, beta in pairs): + raise ValueError('given values not in sort order') + + # Check ordering in context of sorted list. + + if start: + pos, loc = _pos(start - 1) + if _keys[pos][loc] > keys[0]: + msg = '{0!r} not in sort order at index {1}'.format( + values[0], start) + raise ValueError(msg) + + if stop != _len: + pos, loc = _pos(stop) + if _keys[pos][loc] < keys[-1]: + msg = '{0!r} not in sort order at index {1}'.format( + values[-1], stop) + raise ValueError(msg) + + # Delete the existing values. + + self._delitem(index) + + # Insert the new values. + + _insert = self.insert + for idx, val in enumerate(values): + _insert(start + idx, val) + else: + pos, loc = _pos(index) + key = self._key(value) + _check_order(index, key, value) + _lists[pos][loc] = value + _keys[pos][loc] = key + if len(_lists[pos]) == (loc + 1): + _maxes[pos] = key + + def irange(self, minimum=None, maximum=None, inclusive=(True, True), + reverse=False): + """ + Create an iterator of values between `minimum` and `maximum`. + + `inclusive` is a pair of booleans that indicates whether the minimum + and maximum ought to be included in the range, respectively. The + default is (True, True) such that the range is inclusive of both + minimum and maximum. + + Both `minimum` and `maximum` default to `None` which is automatically + inclusive of the start and end of the list, respectively. + + When `reverse` is `True` the values are yielded from the iterator in + reverse order; `reverse` defaults to `False`. 
+ """ + minimum = self._key(minimum) if minimum is not None else None + maximum = self._key(maximum) if maximum is not None else None + return self._irange_key( + min_key=minimum, max_key=maximum, + inclusive=inclusive, reverse=reverse, + ) + + def irange_key(self, min_key=None, max_key=None, inclusive=(True, True), + reverse=False): + """ + Create an iterator of values between `min_key` and `max_key`. + + `inclusive` is a pair of booleans that indicates whether the min_key + and max_key ought to be included in the range, respectively. The + default is (True, True) such that the range is inclusive of both + `min_key` and `max_key`. + + Both `min_key` and `max_key` default to `None` which is automatically + inclusive of the start and end of the list, respectively. + + When `reverse` is `True` the values are yielded from the iterator in + reverse order; `reverse` defaults to `False`. + """ + _maxes = self._maxes + + if not _maxes: + return iter(()) + + _keys = self._keys + + # Calculate the minimum (pos, idx) pair. By default this location + # will be inclusive in our calculation. + + if min_key is None: + min_pos = 0 + min_idx = 0 + else: + if inclusive[0]: + min_pos = bisect_left(_maxes, min_key) + + if min_pos == len(_maxes): + return iter(()) + + min_idx = bisect_left(_keys[min_pos], min_key) + else: + min_pos = bisect_right(_maxes, min_key) + + if min_pos == len(_maxes): + return iter(()) + + min_idx = bisect_right(_keys[min_pos], min_key) + + # Calculate the maximum (pos, idx) pair. By default this location + # will be exclusive in our calculation. 
+ + if max_key is None: + max_pos = len(_maxes) - 1 + max_idx = len(_keys[max_pos]) + else: + if inclusive[1]: + max_pos = bisect_right(_maxes, max_key) + + if max_pos == len(_maxes): + max_pos -= 1 + max_idx = len(_keys[max_pos]) + else: + max_idx = bisect_right(_keys[max_pos], max_key) + else: + max_pos = bisect_left(_maxes, max_key) + + if max_pos == len(_maxes): + max_pos -= 1 + max_idx = len(_keys[max_pos]) + else: + max_idx = bisect_left(_keys[max_pos], max_key) + + return self._islice(min_pos, min_idx, max_pos, max_idx, reverse) + + _irange_key = irange_key + + def bisect_left(self, val): + """ + Similar to the *bisect* module in the standard library, this returns an + appropriate index to insert *val*. If *val* is already present, the + insertion point will be before (to the left of) any existing entries. + """ + return self._bisect_key_left(self._key(val)) + + def bisect_right(self, val): + """ + Same as *bisect_left*, but if *val* is already present, the insertion + point will be after (to the right of) any existing entries. + """ + return self._bisect_key_right(self._key(val)) + + bisect = bisect_right + + def bisect_key_left(self, key): + """ + Similar to the *bisect* module in the standard library, this returns an + appropriate index to insert a value with a given *key*. If values with + *key* are already present, the insertion point will be before (to the + left of) any existing entries. + """ + _maxes = self._maxes + + if not _maxes: + return 0 + + pos = bisect_left(_maxes, key) + + if pos == len(_maxes): + return self._len + + idx = bisect_left(self._keys[pos], key) + + return self._loc(pos, idx) + + _bisect_key_left = bisect_key_left + + def bisect_key_right(self, key): + """ + Same as *bisect_key_left*, but if *key* is already present, the insertion + point will be after (to the right of) any existing entries. 
+ """ + _maxes = self._maxes + + if not _maxes: + return 0 + + pos = bisect_right(_maxes, key) + + if pos == len(_maxes): + return self._len + + idx = bisect_right(self._keys[pos], key) + + return self._loc(pos, idx) + + bisect_key = bisect_key_right + _bisect_key_right = bisect_key_right + + def count(self, val): + """Return the number of occurrences of *val* in the list.""" + _maxes = self._maxes + + if not _maxes: + return 0 + + key = self._key(val) + pos = bisect_left(_maxes, key) + + if pos == len(_maxes): + return 0 + + _lists = self._lists + _keys = self._keys + idx = bisect_left(_keys[pos], key) + total = 0 + len_keys = len(_keys) + len_sublist = len(_keys[pos]) + + while True: + if _keys[pos][idx] != key: + return total + if _lists[pos][idx] == val: + total += 1 + idx += 1 + if idx == len_sublist: + pos += 1 + if pos == len_keys: + return total + len_sublist = len(_keys[pos]) + idx = 0 + + def copy(self): + """Return a shallow copy of the sorted list.""" + return self.__class__(self, key=self._key) + + __copy__ = copy + + def append(self, val): + """ + Append the element *val* to the list. Raises a ValueError if the *val* + would violate the sort order. + """ + # pylint: disable=arguments-differ + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + key = self._key(val) + + if not _maxes: + _maxes.append(key) + _keys.append([key]) + _lists.append([val]) + self._len = 1 + return + + pos = len(_keys) - 1 + + if key < _keys[pos][-1]: + msg = '{0!r} not in sort order at index {1}'.format(val, self._len) + raise ValueError(msg) + + _lists[pos].append(val) + _keys[pos].append(key) + _maxes[pos] = key + self._len += 1 + self._expand(pos) + + def extend(self, values): + """ + Extend the list by appending all elements from the *values*. Raises a + ValueError if the sort order would be violated. 
+ """ + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + _load = self._load + + if not isinstance(values, list): + values = list(values) + + keys = list(map(self._key, values)) + + if any(keys[pos - 1] > keys[pos] + for pos in range(1, len(keys))): + raise ValueError('given sequence not in sort order') + + offset = 0 + + if _maxes: + if keys[0] < _keys[-1][-1]: + msg = '{0!r} not in sort order at index {1}'.format(values[0], self._len) + raise ValueError(msg) + + if len(_keys[-1]) < self._half: + _lists[-1].extend(values[:_load]) + _keys[-1].extend(keys[:_load]) + _maxes[-1] = _keys[-1][-1] + offset = _load + + len_keys = len(_keys) + + for idx in range(offset, len(keys), _load): + _lists.append(values[idx:(idx + _load)]) + _keys.append(keys[idx:(idx + _load)]) + _maxes.append(_keys[-1][-1]) + + _index = self._index + + if len_keys == len(_keys): + len_index = len(_index) + if len_index > 0: + len_values = len(values) + child = len_index - 1 + while child: + _index[child] += len_values + child = (child - 1) >> 1 + _index[0] += len_values + else: + del _index[:] + + self._len += len(values) + + def insert(self, idx, val): + """ + Insert the element *val* into the list at *idx*. Raises a ValueError if + the *val* at *idx* would violate the sort order. 
+ """ + _len = self._len + _lists = self._lists + _keys = self._keys + _maxes = self._maxes + + if idx < 0: + idx += _len + if idx < 0: + idx = 0 + if idx > _len: + idx = _len + + key = self._key(val) + + if not _maxes: + self._len = 1 + _lists.append([val]) + _keys.append([key]) + _maxes.append(key) + return + + if not idx: + if key > _keys[0][0]: + msg = '{0!r} not in sort order at index {1}'.format(val, 0) + raise ValueError(msg) + else: + self._len += 1 + _lists[0].insert(0, val) + _keys[0].insert(0, key) + self._expand(0) + return + + if idx == _len: + pos = len(_keys) - 1 + if _keys[pos][-1] > key: + msg = '{0!r} not in sort order at index {1}'.format(val, _len) + raise ValueError(msg) + else: + self._len += 1 + _lists[pos].append(val) + _keys[pos].append(key) + _maxes[pos] = _keys[pos][-1] + self._expand(pos) + return + + pos, idx = self._pos(idx) + idx_before = idx - 1 + if idx_before < 0: + pos_before = pos - 1 + idx_before = len(_keys[pos_before]) - 1 + else: + pos_before = pos + + before = _keys[pos_before][idx_before] + if before <= key <= _keys[pos][idx]: + self._len += 1 + _lists[pos].insert(idx, val) + _keys[pos].insert(idx, key) + self._expand(pos) + else: + msg = '{0!r} not in sort order at index {1}'.format(val, idx) + raise ValueError(msg) + + def index(self, val, start=None, stop=None): + """ + Return the smallest *k* such that L[k] == val and i <= k < j`. Raises + ValueError if *val* is not present. *stop* defaults to the end of the + list. *start* defaults to the beginning. Negative indices are supported, + as for slice indices. 
+ """ + _len = self._len + + if not _len: + raise ValueError('{0!r} is not in list'.format(val)) + + if start is None: + start = 0 + if start < 0: + start += _len + if start < 0: + start = 0 + + if stop is None: + stop = _len + if stop < 0: + stop += _len + if stop > _len: + stop = _len + + if stop <= start: + raise ValueError('{0!r} is not in list'.format(val)) + + _maxes = self._maxes + key = self._key(val) + pos = bisect_left(_maxes, key) + + if pos == len(_maxes): + raise ValueError('{0!r} is not in list'.format(val)) + + stop -= 1 + _lists = self._lists + _keys = self._keys + idx = bisect_left(_keys[pos], key) + len_keys = len(_keys) + len_sublist = len(_keys[pos]) + + while True: + if _keys[pos][idx] != key: + raise ValueError('{0!r} is not in list'.format(val)) + if _lists[pos][idx] == val: + loc = self._loc(pos, idx) + if start <= loc <= stop: + return loc + elif loc > stop: + break + idx += 1 + if idx == len_sublist: + pos += 1 + if pos == len_keys: + raise ValueError('{0!r} is not in list'.format(val)) + len_sublist = len(_keys[pos]) + idx = 0 + + raise ValueError('{0!r} is not in list'.format(val)) + + def __add__(self, that): + """ + Return a new sorted list containing all the elements in *self* and + *that*. Elements in *that* do not need to be properly ordered with + respect to *self*. + """ + values = reduce(iadd, self._lists, []) + values.extend(that) + return self.__class__(values, key=self._key) + + def __mul__(self, that): + """ + Return a new sorted list containing *that* shallow copies of each item + in SortedListWithKey. + """ + values = reduce(iadd, self._lists, []) * that + return self.__class__(values, key=self._key) + + def __imul__(self, that): + """ + Increase the length of the list by appending *that* shallow copies of + each item. 
+ """ + values = reduce(iadd, self._lists, []) * that + self._clear() + self._update(values) + return self + + @recursive_repr + def __repr__(self): + """Return string representation of sequence.""" + name = type(self).__name__ + values = list(self) + _key = self._key + return '{0}({1!r}, key={2!r})'.format(name, values, _key) + + def _check(self): + try: + # Check load parameters. + + assert self._load >= 4 + assert self._half == (self._load >> 1) + assert self._dual == (self._load << 1) + + # Check empty sorted list case. + + if self._maxes == []: + assert self._keys == [] + assert self._lists == [] + return + + assert self._maxes and self._keys and self._lists + + # Check all sublists are sorted. + + assert all(sublist[pos - 1] <= sublist[pos] + for sublist in self._keys + for pos in range(1, len(sublist))) + + # Check beginning/end of sublists are sorted. + + for pos in range(1, len(self._keys)): + assert self._keys[pos - 1][-1] <= self._keys[pos][0] + + # Check length of _maxes and _lists match. + + assert len(self._maxes) == len(self._lists) == len(self._keys) + + # Check _keys matches _key mapped to _lists. + + assert all(len(val_list) == len(key_list) + for val_list, key_list in zip(self._lists, self._keys)) + assert all(self._key(val) == key for val, key in + zip((_val for _val_list in self._lists for _val in _val_list), + (_key for _key_list in self._keys for _key in _key_list))) + + # Check _maxes is a map of _keys. + + assert all(self._maxes[pos] == self._keys[pos][-1] + for pos in range(len(self._maxes))) + + # Check load level is less than _dual. + + assert all(len(sublist) <= self._dual for sublist in self._lists) + + # Check load level is greater than _half for all + # but the last sublist. + + assert all(len(self._lists[pos]) >= self._half + for pos in range(0, len(self._lists) - 1)) + + # Check length. + + assert self._len == sum(len(sublist) for sublist in self._lists) + + # Check index. 
+ + if self._index: + assert len(self._index) == self._offset + len(self._lists) + assert self._len == self._index[0] + + def test_offset_pos(pos): + "Test positional indexing offset." + from_index = self._index[self._offset + pos] + return from_index == len(self._lists[pos]) + + assert all(test_offset_pos(pos) + for pos in range(len(self._lists))) + + for pos in range(self._offset): + child = (pos << 1) + 1 + if self._index[pos] == 0: + assert child >= len(self._index) + elif child + 1 == len(self._index): + assert self._index[pos] == self._index[child] + else: + child_sum = self._index[child] + self._index[child + 1] + assert self._index[pos] == child_sum + + except: + import sys + import traceback + + traceback.print_exc(file=sys.stdout) + + print('len', self._len) + print('load', self._load, self._half, self._dual) + print('offset', self._offset) + print('len_index', len(self._index)) + print('index', self._index) + print('len_maxes', len(self._maxes)) + print('maxes', self._maxes) + print('len_keys', len(self._keys)) + print('keys', self._keys) + print('len_lists', len(self._lists)) + print('lists', self._lists) + + raise diff --git a/src/rez/vendor/sortedcontainers/sortedset.py b/src/rez/vendor/sortedcontainers/sortedset.py new file mode 100644 index 000000000..6d82b387b --- /dev/null +++ b/src/rez/vendor/sortedcontainers/sortedset.py @@ -0,0 +1,327 @@ +"""Sorted set implementation. + +""" + +from collections import Set, MutableSet, Sequence +from itertools import chain +import operator as op + +from .sortedlist import SortedList, recursive_repr, SortedListWithKey + +class SortedSet(MutableSet, Sequence): + """ + A `SortedSet` provides the same methods as a `set`. Additionally, a + `SortedSet` maintains its items in sorted order, allowing the `SortedSet` to + be indexed. + + Unlike a `set`, a `SortedSet` requires items be hashable and comparable. 
+ """ + # pylint: disable=too-many-ancestors + def __init__(self, iterable=None, key=None): + """ + A `SortedSet` provides the same methods as a `set`. Additionally, a + `SortedSet` maintains its items in sorted order, allowing the + `SortedSet` to be indexed. + + An optional *iterable* provides an initial series of items to populate + the `SortedSet`. + + An optional *key* argument defines a callable that, like the `key` + argument to Python's `sorted` function, extracts a comparison key from + each set item. If no function is specified, the default compares the + set items directly. + """ + self._key = key + + if not hasattr(self, '_set'): + self._set = set() + + _set = self._set + self.isdisjoint = _set.isdisjoint + self.issubset = _set.issubset + self.issuperset = _set.issuperset + + if key is None: + self._list = SortedList(self._set) + else: + self._list = SortedListWithKey(self._set, key=key) + + _list = self._list + self.bisect_left = _list.bisect_left + self.bisect = _list.bisect + self.bisect_right = _list.bisect_right + self.index = _list.index + self.irange = _list.irange + self.islice = _list.islice + self._reset = _list._reset # pylint: disable=protected-access + + if key is not None: + self.bisect_key_left = _list.bisect_key_left + self.bisect_key_right = _list.bisect_key_right + self.bisect_key = _list.bisect_key + self.irange_key = _list.irange_key + + if iterable is not None: + self._update(iterable) + + @property + def key(self): + """Key function used to extract comparison key for sorting.""" + return self._key + + @classmethod + def _fromset(cls, values, key=None): + """Initialize sorted set from existing set.""" + sorted_set = object.__new__(cls) + sorted_set._set = values # pylint: disable=protected-access + sorted_set.__init__(key=key) + return sorted_set + + def __contains__(self, value): + """Return True if and only if *value* is an element in the set.""" + return value in self._set + + def __getitem__(self, index): + """ + Return the 
element at position *index*. + + Supports slice notation and negative indexes. + """ + return self._list[index] + + def __delitem__(self, index): + """ + Remove the element at position *index*. + + Supports slice notation and negative indexes. + """ + _set = self._set + _list = self._list + if isinstance(index, slice): + values = _list[index] + _set.difference_update(values) + else: + value = _list[index] + _set.remove(value) + del _list[index] + + def _make_cmp(self, set_op, doc): + "Make comparator method." + def comparer(self, that): + "Compare method for sorted set and set-like object." + # pylint: disable=protected-access + if isinstance(that, SortedSet): + return set_op(self._set, that._set) + elif isinstance(that, Set): + return set_op(self._set, that) + return NotImplemented + + comparer.__name__ = '__{0}__'.format(set_op.__name__) + doc_str = 'Return True if and only if Set is {0} `that`.' + comparer.__doc__ = doc_str.format(doc) + + return comparer + + __eq__ = _make_cmp(None, op.eq, 'equal to') + __ne__ = _make_cmp(None, op.ne, 'not equal to') + __lt__ = _make_cmp(None, op.lt, 'a proper subset of') + __gt__ = _make_cmp(None, op.gt, 'a proper superset of') + __le__ = _make_cmp(None, op.le, 'a subset of') + __ge__ = _make_cmp(None, op.ge, 'a superset of') + + def __len__(self): + """Return the number of elements in the set.""" + return len(self._set) + + def __iter__(self): + """ + Return an iterator over the Set. Elements are iterated in their sorted + order. + + Iterating the Set while adding or deleting values may raise a + `RuntimeError` or fail to iterate over all entries. + """ + return iter(self._list) + + def __reversed__(self): + """ + Return an iterator over the Set. Elements are iterated in their reverse + sorted order. + + Iterating the Set while adding or deleting values may raise a + `RuntimeError` or fail to iterate over all entries. 
+ """ + return reversed(self._list) + + def add(self, value): + """Add the element *value* to the set.""" + _set = self._set + if value not in _set: + _set.add(value) + self._list.add(value) + + def clear(self): + """Remove all elements from the set.""" + self._set.clear() + self._list.clear() + + def copy(self): + """Create a shallow copy of the sorted set.""" + return self._fromset(set(self._set), key=self._key) + + __copy__ = copy + + def count(self, value): + """Return the number of occurrences of *value* in the set.""" + return 1 if value in self._set else 0 + + def discard(self, value): + """ + Remove the first occurrence of *value*. If *value* is not a member, + does nothing. + """ + _set = self._set + if value in _set: + _set.remove(value) + self._list.discard(value) + + def pop(self, index=-1): + """ + Remove and return item at *index* (default last). Raises IndexError if + set is empty or index is out of range. Negative indexes are supported, + as for slice indices. + """ + # pylint: disable=arguments-differ + value = self._list.pop(index) + self._set.remove(value) + return value + + def remove(self, value): + """ + Remove first occurrence of *value*. Raises ValueError if + *value* is not present. + """ + self._set.remove(value) + self._list.remove(value) + + def difference(self, *iterables): + """ + Return a new set with elements in the set that are not in the + *iterables*. + """ + diff = self._set.difference(*iterables) + return self._fromset(diff, key=self._key) + + __sub__ = difference + __rsub__ = __sub__ + + def difference_update(self, *iterables): + """ + Update the set, removing elements found in keeping only elements + found in any of the *iterables*. 
+ """ + _set = self._set + values = set(chain(*iterables)) + if (4 * len(values)) > len(_set): + _list = self._list + _set.difference_update(values) + _list.clear() + _list.update(_set) + else: + _discard = self.discard + for value in values: + _discard(value) + return self + + __isub__ = difference_update + + def intersection(self, *iterables): + """ + Return a new set with elements common to the set and all *iterables*. + """ + comb = self._set.intersection(*iterables) + return self._fromset(comb, key=self._key) + + __and__ = intersection + __rand__ = __and__ + + def intersection_update(self, *iterables): + """ + Update the set, keeping only elements found in it and all *iterables*. + """ + _set = self._set + _list = self._list + _set.intersection_update(*iterables) + _list.clear() + _list.update(_set) + return self + + __iand__ = intersection_update + + def symmetric_difference(self, that): + """ + Return a new set with elements in either *self* or *that* but not both. + """ + diff = self._set.symmetric_difference(that) + return self._fromset(diff, key=self._key) + + __xor__ = symmetric_difference + __rxor__ = __xor__ + + def symmetric_difference_update(self, that): + """ + Update the set, keeping only elements found in either *self* or *that*, + but not in both. + """ + _set = self._set + _list = self._list + _set.symmetric_difference_update(that) + _list.clear() + _list.update(_set) + return self + + __ixor__ = symmetric_difference_update + + def union(self, *iterables): + """ + Return a new SortedSet with elements from the set and all *iterables*. 
+ """ + return self.__class__(chain(iter(self), *iterables), key=self._key) + + __or__ = union + __ror__ = __or__ + + def update(self, *iterables): + """Update the set, adding elements from all *iterables*.""" + _set = self._set + values = set(chain(*iterables)) + if (4 * len(values)) > len(_set): + _list = self._list + _set.update(values) + _list.clear() + _list.update(_set) + else: + _add = self.add + for value in values: + _add(value) + return self + + __ior__ = update + _update = update + + def __reduce__(self): + return (type(self), (self._set, self._key)) + + @recursive_repr + def __repr__(self): + _key = self._key + key = '' if _key is None else ', key={0!r}'.format(_key) + name = type(self).__name__ + return '{0}({1!r}{2})'.format(name, list(self), key) + + def _check(self): + # pylint: disable=protected-access + self._list._check() + assert len(self._set) == len(self._list) + _set = self._set + assert all(val in _set for val in self._list) From 19eaa7254910f72e0593176cb19cf8072c34fd5b Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 3 Oct 2017 15:25:29 +1100 Subject: [PATCH 090/124] -minor optimisation in version code --- src/rez/vendor/version/version.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/rez/vendor/version/version.py b/src/rez/vendor/version/version.py index ebb129a33..de475190c 100644 --- a/src/rez/vendor/version/version.py +++ b/src/rez/vendor/version/version.py @@ -763,7 +763,7 @@ def __init__(self, range_str='', make_token=AlphanumericVersionToken, impossible range is given, such as '3+<2'. 
""" self._str = None - self.bounds = [] + self.bounds = [] # note: kept in ascending order if range_str is None: return @@ -998,8 +998,11 @@ def contains_version(self, version): if len(self.bounds) < 5: # not worth overhead of binary search for bound in self.bounds: - if bound.contains_version(version): + i = bound.version_containment(version) + if i == 0: return True + if i == -1: + return False else: _, contains = self._contains_version(version) return contains @@ -1217,7 +1220,9 @@ def _issuperset(cls, bounds1, bounds2): @classmethod def _intersects(cls, bounds1, bounds2): + # sort so bounds1 is the shorter list bounds1, bounds2 = sorted((bounds1, bounds2), key=lambda x: len(x)) + if len(bounds2) < 5: # not worth overhead of binary search for bound1 in bounds1: From 7256168a07ef569d34f9dcb963f3ef47a4edf6e3 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 4 Oct 2017 11:01:06 +1100 Subject: [PATCH 091/124] -added stats gathering to solver -added some basic timings also --- src/rez/SOLVER.md | 4 +- src/rez/cli/env.py | 6 +- src/rez/resolved_context.py | 8 +- src/rez/resolver.py | 7 +- src/rez/solver.py | 195 +++++++++++++++++++++++++----------- 5 files changed, 154 insertions(+), 66 deletions(-) diff --git a/src/rez/SOLVER.md b/src/rez/SOLVER.md index b85602c87..4dcf1336c 100644 --- a/src/rez/SOLVER.md +++ b/src/rez/SOLVER.md @@ -193,7 +193,7 @@ This indicates that there are still outstanding *extractions* for this scope. You will see this once, at the start of the solve. It simply prints the initial request list. - merged_request: foo-1.2 bah-3 + merged request: foo-1.2 bah-3 You will see this once and immediately after the `request:` output. It shows a simplified (merged) version of the initial request. Notice here how `~foo-1` is @@ -206,7 +206,7 @@ This is pushing the initial *phase* onto the *phase stack*. 
The `{0,0}` means that: * There is 1 phase in the stack (this is the zeroeth phase - phases are pushed - and popped from the bottom of the stack); + and popped from the bottom of the stack); * Zero other phases have already been solved (or failed) at this depth so far. -------------------------------------------------------------------------------- diff --git a/src/rez/cli/env.py b/src/rez/cli/env.py index eb78afb30..95f97f1f3 100644 --- a/src/rez/cli/env.py +++ b/src/rez/cli/env.py @@ -101,6 +101,9 @@ def setup_parser(parser, completions=False): "--no-passive", action="store_true", help="only print actions that affect the solve (has an effect only " "when verbosity is enabled)") + parser.add_argument( + "--stats", action="store_true", + help="print advanced solver stats") parser.add_argument( "--pre-command", type=str, help=SUPPRESS) PKG_action = parser.add_argument( @@ -197,7 +200,8 @@ def command(opts, parser, extra_arg_groups=None): max_fails=opts.max_fails, time_limit=opts.time_limit, caching=(not opts.no_cache), - suppress_passive=opts.no_passive) + suppress_passive=opts.no_passive, + print_stats=opts.stats) success = (context.status == ResolverStatus.solved) if not success: diff --git a/src/rez/resolved_context.py b/src/rez/resolved_context.py index 186aa88c2..448e2728d 100644 --- a/src/rez/resolved_context.py +++ b/src/rez/resolved_context.py @@ -139,7 +139,8 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, building=False, caching=None, package_paths=None, package_filter=None, package_orderers=None, max_fails=-1, add_implicit_packages=True, time_limit=-1, callback=None, - package_load_callback=None, buf=None, suppress_passive=False): + package_load_callback=None, buf=None, suppress_passive=False, + print_stats=False): """Perform a package resolve, and store the result. 
Args: @@ -173,6 +174,7 @@ def __init__(self, package_requests, verbosity=0, timestamp=None, suppress_passive (bool): If True, don't print debugging info that has had no effect on the solve. This argument only has an effect if `verbosity` > 2. + print_stats (bool): If true, print advanced solver stats at the end. """ self.load_path = None @@ -263,7 +265,9 @@ def _package_load_callback(package): package_load_callback=_package_load_callback, verbosity=verbosity, buf=buf, - suppress_passive=suppress_passive) + suppress_passive=suppress_passive, + print_stats=print_stats) + resolver.solve() # convert the results diff --git a/src/rez/resolver.py b/src/rez/resolver.py index ccaec96d4..f17342054 100644 --- a/src/rez/resolver.py +++ b/src/rez/resolver.py @@ -34,7 +34,7 @@ class Resolver(object): def __init__(self, context, package_requests, package_paths, package_filter=None, package_orderers=None, timestamp=0, callback=None, building=False, verbosity=False, buf=None, package_load_callback=None, caching=True, - suppress_passive=False): + suppress_passive=False, print_stats=False): """Create a Resolver. Args: @@ -50,6 +50,7 @@ def __init__(self, context, package_requests, package_paths, package_filter=None building: True if we're resolving for a build. caching: If True, cache(s) may be used to speed the resolve. If False, caches will not be used. + print_stats (bool): If true, print advanced solver stats at the end. """ self.context = context self.package_requests = package_requests @@ -63,6 +64,7 @@ def __init__(self, context, package_requests, package_paths, package_filter=None self.caching = caching self.buf = buf self.suppress_passive = suppress_passive + self.print_stats = print_stats # store hash of package orderers. 
This is used in the memcached key if package_orderers: @@ -386,7 +388,8 @@ def _solve(self): verbosity=self.verbosity, prune_unfailed=config.prune_failed_graph, buf=self.buf, - suppress_passive=self.suppress_passive) + suppress_passive=self.suppress_passive, + print_stats=self.print_stats) solver.solve() return solver diff --git a/src/rez/solver.py b/src/rez/solver.py index 1f88df559..5e2c9e6dd 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -22,6 +22,7 @@ RequirementList from rez.vendor.enum import Enum from rez.vendor.sortedcontainers.sortedset import SortedSet +from contextlib import contextmanager import copy import time import sys @@ -591,18 +592,24 @@ def iter_variants(self): yield variant def intersect(self, range_): - """Remove variants whos version fall outside of the given range.""" + self.solver.intersection_broad_tests_count += 1 + + """Remove variants whose version fall outside of the given range.""" if range_.is_any(): return self - if range_ in self.been_intersected_with: - return self + if self.solver.optimised: + if range_ in self.been_intersected_with: + return self if self.pr: self.pr.passive("intersecting %s wrt range '%s'...", self, range_) - # this is faster than iter_intersecting :( - entries = [x for x in self.entries if x.version in range_] + self.solver.intersection_tests_count += 1 + + with self.solver.timed(self.solver.intersection_time): + # this is faster than iter_intersecting :( + entries = [x for x in self.entries if x.version in range_] if not entries: return None @@ -622,16 +629,23 @@ def reduce_by(self, package_request): (VariantSlice, [Reduction]) tuple, where slice may be None if all variants were reduced. 
""" - if package_request in self.been_reduced_by: - return (self, []) + if self.pr: + reqstr = _short_req_str(package_request) + self.pr.passive("reducing %s wrt %s...", self, reqstr) + + if self.solver.optimised: + if package_request in self.been_reduced_by: + return (self, []) if (package_request.range is None) or \ (package_request.name not in self.fam_requires): return (self, []) - if self.pr: - reqstr = _short_req_str(package_request) - self.pr.passive("reducing %s wrt %s...", self, reqstr) + with self.solver.timed(self.solver.reduction_time): + return self._reduce_by(package_request) + + def _reduce_by(self, package_request): + self.solver.reduction_tests_count += 1 entries = [] reductions = [] @@ -984,13 +998,13 @@ def reduce_by(self, package_request): scope copy with reductions applied, or self if there were no reductions, or None if the scope was completely reduced. """ + self.solver.reduction_broad_tests_count += 1 + if self.package_request.conflict: # conflict scopes don't reduce. Instead, other scopes will be # reduced against a conflict scope. 
return (self, []) - self.solver.reduction_tests_count += 1 - # perform the reduction new_slice, reductions = self.variant_slice.reduce_by(package_request) @@ -1085,7 +1099,7 @@ def _update(self): def __str__(self): if self.variant_slice is None: - return "req(%s)" % str(self.package_request) + return str(self.package_request) else: return str(self.variant_slice) @@ -1183,18 +1197,19 @@ def _create_phase(status=None): extracted_requests = [] # perform all possible extractions - for i in range(len(scopes)): - while True: - scope_, extracted_request = scopes[i].extract() - - if extracted_request: - extracted_requests.append(extracted_request) - k = (scopes[i].package_name, extracted_request.name) - extractions[k] = extracted_request - self.solver.extractions_count += 1 - scopes[i] = scope_ - else: - break + with self.solver.timed(self.solver.extraction_time): + for i in range(len(scopes)): + while True: + scope_, extracted_request = scopes[i].extract() + + if extracted_request: + extracted_requests.append(extracted_request) + k = (scopes[i].package_name, extracted_request.name) + extractions[k] = extracted_request + self.solver.extractions_count += 1 + scopes[i] = scope_ + else: + break if not extracted_requests: break @@ -1215,10 +1230,13 @@ def _create_phase(status=None): self.pr.subheader("INTERSECTING:") req_fams = [] - for i, scope in enumerate(scopes): - extracted_req = extracted_requests.get(scope.package_name) + with self.solver.timed(self.solver.intersection_test_time): + for i, scope in enumerate(scopes): + extracted_req = extracted_requests.get(scope.package_name) + + if extracted_req is None: + continue - if extracted_req is not None: # perform the intersection scope_ = scope.intersect(extracted_req.range) @@ -1231,7 +1249,7 @@ def _create_phase(status=None): failure_reason = DependencyConflicts([conflict]) return _create_phase(SolverStatus.failed) - elif scope_ is not scope: + if scope_ is not scope: # the scope was narrowed because it intersected # with 
an extraction scopes[i] = scope_ @@ -1321,23 +1339,24 @@ def _create_phase(status=None): # iteratively reduce until there are no more pending reductions. # Note that if a scope is reduced, then other scopes need to reduce # against it once again. - while pending_reducts: - x, y = pending_reducts.pop() + with self.solver.timed(self.solver.reduction_test_time): + while pending_reducts: + x, y = pending_reducts.pop() - new_scope, reductions = scopes[x].reduce_by( - scopes[y].package_request) + new_scope, reductions = scopes[x].reduce_by( + scopes[y].package_request) - if new_scope is None: - failure_reason = TotalReduction(reductions) - return _create_phase(SolverStatus.failed) + if new_scope is None: + failure_reason = TotalReduction(reductions) + return _create_phase(SolverStatus.failed) - elif new_scope is not scopes[x]: - scopes[x] = new_scope + elif new_scope is not scopes[x]: + scopes[x] = new_scope - # other scopes need to reduce against x again - for j in all_scopes_i: - if j != x: - pending_reducts.add((j, x)) + # other scopes need to reduce against x again + for j in all_scopes_i: + if j != x: + pending_reducts.add((j, x)) changed_scopes_i = set() @@ -1739,7 +1758,7 @@ def __init__(self, package_requests, package_paths, context=None, package_filter=None, package_orderers=None, callback=None, building=False, optimised=True, verbosity=0, buf=None, package_load_callback=None, prune_unfailed=True, - suppress_passive=False): + suppress_passive=False, print_stats=False): """Create a Solver. Args: @@ -1771,11 +1790,11 @@ def __init__(self, package_requests, package_paths, context=None, suppress_passive (bool): If True, don't print debugging info that has had no effect on the solve. This argument only has an effect if `verbosity` > 2. + print_stats (bool): If true, print advanced solver stats at the end. 
""" self.package_paths = package_paths self.package_filter = package_filter self.package_orderers = package_orderers - self.pr = _Printer(verbosity, buf=buf, suppress_passive=suppress_passive) self.callback = callback self.prune_unfailed = prune_unfailed self.package_load_callback = package_load_callback @@ -1783,6 +1802,10 @@ def __init__(self, package_requests, package_paths, context=None, self.request_list = None self.context = context + self.pr = _Printer(verbosity, buf=buf, suppress_passive=suppress_passive) + self.print_stats = print_stats + self.buf = buf + if _force_unoptimised_solver: self.optimised = False else: @@ -1796,16 +1819,25 @@ def __init__(self, package_requests, package_paths, context=None, self.abort_reason = None self.callback_return = None self.depth_counts = None + self.solve_begun = None self.solve_time = None self.load_time = None - self.solve_begun = None # advanced solve metrics self.solve_count = 0 self.extractions_count = 0 self.intersections_count = 0 + self.intersection_tests_count = 0 + self.intersection_broad_tests_count = 0 self.reductions_count = 0 self.reduction_tests_count = 0 + self.reduction_broad_tests_count = 0 + + self.extraction_time = [0.0] + self.intersection_time = [0.0] + self.intersection_test_time = [0.0] + self.reduction_time = [0.0] + self.reduction_test_time = [0.0] self._init() @@ -1835,6 +1867,13 @@ def __init__(self, package_requests, package_paths, context=None, phase = _ResolvePhase(solver=self) self._push_phase(phase) + @contextmanager + def timed(self, target): + t = time.time() + yield + secs = time.time() - t + target[0] += secs + @property def status(self): """Return the current status of the solve. 
@@ -1921,19 +1960,50 @@ def solve(self): self.solve_time = time.time() - t1 # print stats - if self.pr: - d = { - "num_solves": self.solve_count, - "num_fails": self.num_fails, - "num_extractions": self.extractions_count, - "num_intersections": self.intersections_count, - "num_reductions": self.reductions_count, - "num_reduction_tests": self.reduction_tests_count - } - + if self.pr.verbosity > 2: from pprint import pformat self.pr.subheader("SOLVE STATS:") - self.pr(pformat(d)) + self.pr(pformat(self.solve_stats)) + + elif self.print_stats: + from pprint import pformat + data = {"solve_stats": self.solve_stats} + print >> (self.buf or sys.stdout), pformat(data) + + @property + def solve_stats(self): + extraction_stats = { + "extraction_time": self.extraction_time[0], + "num_extractions": self.extractions_count + } + + intersection_stats = { + "num_intersections": self.intersections_count, + "num_intersection_tests": self.intersection_tests_count, + "num_intersection_broad_tests": self.intersection_broad_tests_count, + "intersection_time": self.intersection_time[0], + "intersection_test_time": self.intersection_test_time[0] + } + + reduction_stats = { + "num_reductions": self.reductions_count, + "num_reduction_tests": self.reduction_tests_count, + "num_reduction_broad_tests": self.reduction_broad_tests_count, + "reduction_time": self.reduction_time[0], + "reduction_test_time": self.reduction_test_time[0] + } + + global_stats = { + "solve_time": self.solve_time, + "load_time": self.load_time + } + + return { + "global": global_stats, + "extractions": extraction_stats, + "intersections": intersection_stats, + "reductions": reduction_stats + } def solve_step(self): """Perform a single solve step. 
@@ -1980,8 +2050,6 @@ def solve_step(self): self.pr.header("FAIL: a cycle was detected") else: self.pr.header("SUCCESS") - self.pr("solve time: %.2f seconds", self.solve_time) - self.pr("load time: %.2f seconds", self.load_time) else: self.pr.subheader("EXHAUSTED:") @@ -2089,14 +2157,23 @@ def _init(self): self.depth_counts = {} self.solve_time = 0.0 self.load_time = 0.0 + self.solve_begun = False + # advanced solve stats self.solve_count = 0 self.extractions_count = 0 self.intersections_count = 0 + self.intersection_tests_count = 0 + self.intersection_broad_tests_count = 0 self.reductions_count = 0 self.reduction_tests_count = 0 + self.reduction_broad_tests_count = 0 - self.solve_begun = False + self.extraction_time = [0.0] + self.intersection_time = [0.0] + self.intersection_test_time = [0.0] + self.reduction_time = [0.0] + self.reduction_test_time = [0.0] def _latest_nonfailed_phase(self): if self.status == SolverStatus.failed: From 1541403c5cc7e07dcf98a19e43fc46b285748734 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 4 Oct 2017 13:25:52 +1100 Subject: [PATCH 092/124] -fixed unbelievably simple bug in RequirementList -this was actually the true cause of the solver fail,... 
-...however that bug was working in tandem with the previously fixed bug also --- src/rez/solver.py | 7 ++++++- src/rez/vendor/version/requirement.py | 25 +++++++++++++++---------- src/rez/vendor/version/test.py | 6 ++++++ 3 files changed, 27 insertions(+), 11 deletions(-) diff --git a/src/rez/solver.py b/src/rez/solver.py index 5e2c9e6dd..620ae7471 100644 --- a/src/rez/solver.py +++ b/src/rez/solver.py @@ -706,7 +706,10 @@ def extract(self): return self, None extractable = self.common_fams - self.extracted_fams - fam = iter(extractable).next() + + # the sort is necessary to ensure solves are deterministic + fam = sorted(extractable)[0] + last_range = None ranges = set() @@ -1994,6 +1997,8 @@ def solve_stats(self): } global_stats = { + "num_solves": self.num_solves, + "num_fails": self.num_fails, "solve_time": self.solve_time, "load_time": self.load_time } diff --git a/src/rez/vendor/version/requirement.py b/src/rez/vendor/version/requirement.py index d60e7e2c1..305771368 100644 --- a/src/rez/vendor/version/requirement.py +++ b/src/rez/vendor/version/requirement.py @@ -315,7 +315,7 @@ def __init__(self, requirements): Args: requirements: List of Requirement objects. 
""" - self.requirements_ = None + self.requirements_ = [] self.conflict_ = None self.requirements_dict = {} self.names_ = set() @@ -323,23 +323,28 @@ def __init__(self, requirements): for req in requirements: existing_req = self.requirements_dict.get(req.name) - if existing_req: + + if existing_req is None: + self.requirements_dict[req.name] = req + else: merged_req = existing_req.merged(req) if merged_req is None: self.conflict_ = (existing_req, req) return else: self.requirements_dict[req.name] = merged_req - else: - self.requirements_dict[req.name] = req - names = set() - self.requirements_ = [] + seen = set() + + # build optimised list, this intends to match original request order + # as closely as possible for req in requirements: - if req.name not in names: - names.add(req.name) - self.requirements_.append(self.requirements_dict[req.name]) - if req.conflict: + if req.name not in seen: + seen.add(req.name) + req_ = self.requirements_dict[req.name] + self.requirements_.append(req_) + + if req_.conflict: self.conflict_names_.add(req.name) else: self.names_.add(req.name) diff --git a/src/rez/vendor/version/test.py b/src/rez/vendor/version/test.py index 63359eebf..f9000aa02 100644 --- a/src/rez/vendor/version/test.py +++ b/src/rez/vendor/version/test.py @@ -438,6 +438,12 @@ def _eq(reqs, expected_reqs): exp_reqs_ = [Requirement(x) for x in expected_reqs] self.assertTrue(reqlist.requirements == exp_reqs_) + exp_names = set(x.name for x in exp_reqs_ if not x.conflict) + self.assertTrue(reqlist.names == exp_names) + + exp_confl_names = set(x.name for x in exp_reqs_ if x.conflict) + self.assertTrue(reqlist.conflict_names == exp_confl_names) + def _confl(reqs, a, b): _print("requirements(%s) == %s <--!--> %s" % (' '.join(reqs), a, b)) reqs_ = [Requirement(x) for x in reqs] From 232d172a1f83c4e0f606058c0e818ebf4214b74f Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 4 Oct 2017 15:19:37 +1100 Subject: [PATCH 093/124] -added test which catches failure described in issue 
#458 --- src/rez/tests/test_packages.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/rez/tests/test_packages.py b/src/rez/tests/test_packages.py index 37790e21e..ddf04fb31 100644 --- a/src/rez/tests/test_packages.py +++ b/src/rez/tests/test_packages.py @@ -339,16 +339,22 @@ def _test(orderer, package_name, expected_order): null_orderer = NullPackageOrder() split_orderer = VersionSplitPackageOrder(Version("2.6.0")) + # after v1.1.0 and before v1.1.1 timestamp_orderer = TimestampPackageOrder(timestamp=3001, rank=3) + # after v2.1.0 and before v2.1.5 + timestamp2_orderer = TimestampPackageOrder(timestamp=7001, rank=3) expected_null_result = ["7", "6", "5"] expected_split_result = ["2.6.0", "2.5.2", "2.7.0", "2.6.8"] expected_timestamp_result = ["1.1.1", "1.1.0", "1.0.6", "1.0.5", "1.2.0", "2.0.0", "2.1.5", "2.1.0"] + expected_timestamp2_result = ["2.1.5", "2.1.0", "2.0.0", "1.2.0", + "1.1.1", "1.1.0", "1.0.6", "1.0.5"] _test(null_orderer, "pysplit", expected_null_result) _test(split_orderer, "python", expected_split_result) _test(timestamp_orderer, "timestamped", expected_timestamp_result) + _test(timestamp2_orderer, "timestamped", expected_timestamp2_result) fam_orderer = PerFamilyOrder( order_dict=dict(pysplit=null_orderer, From f8c312e18b78d9f15585c9a1955e97e34d948878 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 4 Oct 2017 15:32:09 +1100 Subject: [PATCH 094/124] -fixed orderer bug and added test --- src/rez/package_order.py | 7 +++++++ src/rez/utils/_version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/rez/package_order.py b/src/rez/package_order.py index 3d9f0321b..44414bc66 100644 --- a/src/rez/package_order.py +++ b/src/rez/package_order.py @@ -332,13 +332,20 @@ def reorder(self, iterable, key=None): if before and after: package = key(before[0]) first_prerank = package.version.trim(self.rank - 1) + found = False for i, o in enumerate(after): package = key(o) prerank = package.version.trim(self.rank - 1) if prerank 
!= first_prerank: + found = True break + if not found: + # highest version is also within rank, so result is just + # simple descending list + return descending + if i: before = list(reversed(after[:i])) + before after = after[i:] diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 5f272621f..a94351dd8 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.14.0" +_rez_version = "2.14.1" try: from rez.vendor.version.version import Version From 4585f26419dc1359d1aa6d91e95905d36aa921f8 Mon Sep 17 00:00:00 2001 From: Blazej Floch Date: Thu, 12 Oct 2017 12:15:01 -0400 Subject: [PATCH 095/124] Implementing tag_exists for hg. Allows check_tag to work with hg. --- src/rezplugins/release_vcs/hg.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/rezplugins/release_vcs/hg.py b/src/rezplugins/release_vcs/hg.py index c9e4d1bf9..83788b282 100644 --- a/src/rezplugins/release_vcs/hg.py +++ b/src/rezplugins/release_vcs/hg.py @@ -152,6 +152,10 @@ def get_tags(self, patch=False): tags[tag_name] = {'rev': rev, 'shortnode': shortnode} return tags + def tag_exists(self, tag_name): + tags = self.get_tags() + return (tag_name in tags.keys()) + def is_ancestor(self, commit1, commit2, patch=False): """Returns True if commit1 is a direct ancestor of commit2, or False otherwise. From aacd775e6e67f682bb71190058aa28f87e051d3f Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Mon, 27 Nov 2017 15:23:04 -0800 Subject: [PATCH 096/124] Rolled back accidental studio-specific changes to example file. 
--- src/support/shotgun_toolkit/rez_app_launch.py | 25 ++----------------- 1 file changed, 2 insertions(+), 23 deletions(-) diff --git a/src/support/shotgun_toolkit/rez_app_launch.py b/src/support/shotgun_toolkit/rez_app_launch.py index 48ce9211d..c1c3f2dfc 100755 --- a/src/support/shotgun_toolkit/rez_app_launch.py +++ b/src/support/shotgun_toolkit/rez_app_launch.py @@ -59,33 +59,13 @@ def execute(self, app_path, app_args, version, **kwargs): # NUKE_PATH is used by tk-nuke # HIERO_PLUGIN_PATH is used by tk-nuke (nukestudio) # KATANA_RESOURCES is used by tk-katana - parent_variables = ["PYTHONPATH", "HOUDINI_PATH", "NUKE_PATH", - "HIERO_PLUGIN_PATH", "KATANA_RESOURCES"] + config.parent_variables = ["PYTHONPATH", "HOUDINI_PATH", "NUKE_PATH", "HIERO_PLUGIN_PATH", "KATANA_RESOURCES"] rez_packages = extra["rez_packages"] context = ResolvedContext(rez_packages) use_rez = True - # Rez env callback to restore sgtk paths setup by the shotgun launcher - # and the individual engines. - def restore_sgtk_env(executor): - """ - Restore the settings from the current tank environment setup - that happened before rez was able to run. - - """ - for envvar in parent_variables: - paths = os.environ.get(envvar, '').split(';') - #TODO: Remove this when P:\code is removed from domain policy - # P:\code is normally removed by rez, but since we have to - # restore some of the env vars setup by tank, we need to - # pull out the non-tank envvars setup here, which is mostly any - # path on P:\code. 
- paths = [p for p in paths if r'P:\code' not in p] - for path in reversed(paths): - getattr(executor.env, envvar).prepend(path) - system = sys.platform shell_type = 'bash' if system == "linux2": @@ -122,8 +102,7 @@ def restore_sgtk_env(executor): parent_environ=n_env, shell=shell_type, stdin=False, - block=False, - post_actions_callback=restore_sgtk_env, + block=False ) exit_code = proc.wait() context.print_info(verbosity=True) From 37acf748befc6a47622521c3917f9c1fd72448ab Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Mon, 27 Nov 2017 15:25:50 -0800 Subject: [PATCH 097/124] Removed sh and pbs import for git plugin. --- src/rezplugins/release_vcs/git.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/rezplugins/release_vcs/git.py b/src/rezplugins/release_vcs/git.py index b07b07984..9aac55bd0 100644 --- a/src/rezplugins/release_vcs/git.py +++ b/src/rezplugins/release_vcs/git.py @@ -4,11 +4,6 @@ from rez.release_vcs import ReleaseVCS from rez.utils.logging_ import print_error, print_warning, print_debug from rez.exceptions import ReleaseVCSError -import platform -if "windows" in platform.system().lower(): - from rez.vendor.pbs import git -else: - from rez.vendor.sh.sh import git from shutil import rmtree import functools import os.path From cf615c9d6c2157ed34f2f600c52a0338e9386b7d Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 11:31:34 +1100 Subject: [PATCH 098/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index a94351dd8..0cc320a12 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. 
-_rez_version = "2.14.1" +_rez_version = "2.14.2" try: from rez.vendor.version.version import Version From 8533a69877f2b1bdc79aa43e5082b2d16fa8733f Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 12:44:00 +1100 Subject: [PATCH 099/124] version up --- src/rez/utils/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index 0cc320a12..f4a88ca91 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. -_rez_version = "2.14.2" +_rez_version = "2.15.0" try: from rez.vendor.version.version import Version From 2e1a8744a204a8555cf5fdd997402f5011d1a338 Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Mon, 27 Nov 2017 18:19:57 -0800 Subject: [PATCH 100/124] Removed sh and pbs dependency in release_hook. Deleted sh and pbs from vendor library. --- src/rez/vendor/pbs.py | 602 -------- src/rez/vendor/sh/LICENSE.txt | 19 - src/rez/vendor/sh/__init__.py | 6 - src/rez/vendor/sh/sh.py | 1773 ------------------------ src/rezplugins/release_hook/command.py | 46 +- 5 files changed, 22 insertions(+), 2424 deletions(-) delete mode 100644 src/rez/vendor/pbs.py delete mode 100644 src/rez/vendor/sh/LICENSE.txt delete mode 100644 src/rez/vendor/sh/__init__.py delete mode 100644 src/rez/vendor/sh/sh.py diff --git a/src/rez/vendor/pbs.py b/src/rez/vendor/pbs.py deleted file mode 100644 index be1dc274d..000000000 --- a/src/rez/vendor/pbs.py +++ /dev/null @@ -1,602 +0,0 @@ -#=============================================================================== -# Copyright (C) 2011-2012 by Andrew Moffat -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -#=============================================================================== - - - -import subprocess as subp -import sys -import traceback -import os -import re -from glob import glob as original_glob -from types import ModuleType -from functools import partial -import warnings -import platform - - -__version__ = "0.110" -__project_url__ = "https://github.com/amoffat/pbs" - -IS_PY3 = sys.version_info[0] == 3 -if IS_PY3: - raw_input = input - unicode = str -else: - pass - - -if "windows" not in platform.system().lower(): - warnings.simplefilter("always") - warnings.warn(""" - -Sh.py is the new pbs. Please download and install sh.py with the following -command: - - $ pip install sh - -or - - $ easy_install sh - -Sh.py includes many enhancements and will be the supported subprocess launcher -for the future. See its documentation here http://amoffat.github.com/sh/. 
- -To migrate existing code, try this: - - import sh as pbs - -""", DeprecationWarning) - - - -class ErrorReturnCode(Exception): - truncate_cap = 200 - - def __init__(self, full_cmd, stdout, stderr): - self.full_cmd = full_cmd - self.stdout = stdout - self.stderr = stderr - - if self.stdout is None: tstdout = "" - else: - tstdout = self.stdout[:self.truncate_cap] - out_delta = len(self.stdout) - len(tstdout) - if out_delta: - tstdout += ("... (%d more, please see e.stdout)" % out_delta).encode() - - if self.stderr is None: tstderr = "" - else: - tstderr = self.stderr[:self.truncate_cap] - err_delta = len(self.stderr) - len(tstderr) - if err_delta: - tstderr += ("... (%d more, please see e.stderr)" % err_delta).encode() - - msg = "\n\nRan: %r\n\nSTDOUT:\n\n %s\n\nSTDERR:\n\n %s" %\ - (full_cmd, tstdout.decode(), tstderr.decode()) - super(ErrorReturnCode, self).__init__(msg) - -class CommandNotFound(Exception): pass - -rc_exc_regex = re.compile("ErrorReturnCode_(\d+)") -rc_exc_cache = {} - -def get_rc_exc(rc): - rc = int(rc) - try: return rc_exc_cache[rc] - except KeyError: pass - - name = "ErrorReturnCode_%d" % rc - exc = type(name, (ErrorReturnCode,), {}) - rc_exc_cache[rc] = exc - return exc - - - - -def which(program): - is_windows = "windows" in platform.system().lower() - endswith_exe = program.lower().endswith('.exe') - def is_exe(fpath): - return os.path.exists(fpath) and os.access(fpath, os.X_OK) - - fpath, fname = os.path.split(program) - if fpath: - if is_exe(program): - return program - if is_windows and not endswith_exe: - if is_exe(exe_file + '.exe'): - return exe_file + '.exe' - else: - for path in os.environ["PATH"].split(os.pathsep): - exe_file = os.path.join(path, program) - if is_exe(exe_file): - return exe_file - if is_windows and not endswith_exe: - if is_exe(exe_file + '.exe'): - return exe_file + '.exe' - - return None - -def resolve_program(program): - path = which(program) - if not path: - # our actual command might have a dash in it, but we 
can't call - # that from python (we have to use underscores), so we'll check - # if a dash version of our underscore command exists and use that - # if it does - if "_" in program: path = which(program.replace("_", "-")) - if not path: return None - return path - - -def glob(arg): - return original_glob(arg) or arg - - - - -class RunningCommand(object): - def __init__(self, command_ran, process, call_args, stdin=None): - self.command_ran = command_ran - self.process = process - self._stdout = None - self._stderr = None - self.call_args = call_args - - # we're running in the background, return self and let us lazily - # evaluate - if self.call_args["bg"]: return - - # we're running this command as a with context, don't do anything - # because nothing was started to run from Command.__call__ - if self.call_args["with"]: return - - # run and block - if stdin: stdin = stdin.encode("utf8") - self._stdout, self._stderr = self.process.communicate(stdin) - self._handle_exit_code(self.process.wait()) - - def __enter__(self): - # we don't actually do anything here because anything that should - # have been done would have been done in the Command.__call__ call. - # essentially all that has to happen is the comand be pushed on - # the prepend stack. 
- pass - - def __exit__(self, typ, value, traceback): - if self.call_args["with"] and Command._prepend_stack: - Command._prepend_stack.pop() - - def __str__(self): - if IS_PY3: return self.__unicode__() - else: return unicode(self).encode("utf8") - - def __unicode__(self): - if self.process: - if self.call_args["bg"]: self.wait() - if self._stdout: return self.stdout - else: return "" - - def __eq__(self, other): - return unicode(self) == unicode(other) - - def __contains__(self, item): - return item in str(self) - - def __getattr__(self, p): - # let these three attributes pass through to the Popen object - if p in ("send_signal", "terminate", "kill"): - if self.process: return getattr(self.process, p) - else: raise AttributeError - return getattr(unicode(self), p) - - def __repr__(self): - return " ") - except (ValueError, EOFError): break - - try: exec(compile(line, "", "single"), env, env) - except SystemExit: break - except: print(traceback.format_exc()) - - # cleans up our last line - print("") - - - - -# this is a thin wrapper around THIS module (we patch sys.modules[__name__]). -# this is in the case that the user does a "from pbs import whatever" -# in other words, they only want to import certain programs, not the whole -# system PATH worth of commands. in this case, we just proxy the -# import lookup to our Environment class -class SelfWrapper(ModuleType): - def __init__(self, self_module): - # this is super ugly to have to copy attributes like this, - # but it seems to be the only way to make reload() behave - # nicely. if i make these attributes dynamic lookups in - # __getattr__, reload sometimes chokes in weird ways... 
- for attr in ["__builtins__", "__doc__", "__name__", "__package__"]: - setattr(self, attr, getattr(self_module, attr)) - - self.self_module = self_module - self.env = Environment(globals()) - - def __getattr__(self, name): - return self.env[name] - - - - - -# we're being run as a stand-alone script, fire up a REPL -if __name__ == "__main__": - globs = globals() - f_globals = {} - for k in ["__builtins__", "__doc__", "__name__", "__package__"]: - f_globals[k] = globs[k] - env = Environment(f_globals) - run_repl(env) - -# we're being imported from somewhere -else: - self = sys.modules[__name__] - sys.modules[__name__] = SelfWrapper(self) diff --git a/src/rez/vendor/sh/LICENSE.txt b/src/rez/vendor/sh/LICENSE.txt deleted file mode 100644 index a5d3182e5..000000000 --- a/src/rez/vendor/sh/LICENSE.txt +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2011-2012 by Andrew Moffat - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/src/rez/vendor/sh/__init__.py b/src/rez/vendor/sh/__init__.py deleted file mode 100644 index 1ec0cb7fb..000000000 --- a/src/rez/vendor/sh/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# putting rez-specific code here, because this file wouldn't exist in a -# "normal" distribution of sh - -from rez.config import config -from . import sh -sh.ErrorReturnCode.truncate_cap = config.shell_error_truncate_cap \ No newline at end of file diff --git a/src/rez/vendor/sh/sh.py b/src/rez/vendor/sh/sh.py deleted file mode 100644 index 8f5adc0f3..000000000 --- a/src/rez/vendor/sh/sh.py +++ /dev/null @@ -1,1773 +0,0 @@ -#=============================================================================== -# Copyright (C) 2011-2012 by Andrew Moffat -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
-#=============================================================================== - - -__version__ = "1.09" -__project_url__ = "https://github.com/amoffat/sh" - - - -import platform - -if "windows" in platform.system().lower(): - raise ImportError("sh %s is currently only supported on linux and osx. \ -please install pbs 0.110 (http://pypi.python.org/pypi/pbs) for windows \ -support." % __version__) - - - -import sys -IS_PY3 = sys.version_info[0] == 3 - -import traceback -import os -import re -from glob import glob as original_glob -from types import ModuleType -from functools import partial -import inspect -import time as _time - -from locale import getpreferredencoding -DEFAULT_ENCODING = getpreferredencoding() or "utf-8" - - -if IS_PY3: - from io import StringIO - from io import BytesIO as cStringIO - from queue import Queue, Empty -else: - from StringIO import StringIO - from cStringIO import OutputType as cStringIO - from Queue import Queue, Empty - -IS_OSX = platform.system() == "Darwin" -THIS_DIR = os.path.dirname(os.path.realpath(__file__)) - - -import errno -import warnings - -import pty -import termios -import signal -import gc -import select -import atexit -import threading -import tty -import fcntl -import struct -import resource -from collections import deque -import logging -import weakref - - -logging_enabled = False - - -if IS_PY3: - raw_input = input - unicode = str - basestring = str - - -def encode_to_py3bytes_or_py2str(s): - """ takes anything and attempts to return a py2 string or py3 bytes. 
this - is typically used when creating command + arguments to be executed via - os.exec* """ - - fallback_encoding = "utf8" - - if IS_PY3: - s = str(s) - try: - s = bytes(s, DEFAULT_ENCODING) - except UnicodeEncodeError: - s = bytes(s, fallback_encoding) - else: - # attempt to convert the thing to unicode from the system's encoding - try: - s = unicode(s, DEFAULT_ENCODING) - # if the thing is already unicode, or it's a number, it can't be - # coerced to unicode with an encoding argument, but if we leave out - # the encoding argument, it will convert it to a string, then to unicode - except TypeError: - s = unicode(s) - - # now that we have guaranteed unicode, encode to our system encoding, - # but attempt to fall back to something - try: - s = s.encode(DEFAULT_ENCODING) - except: - s = s.encode(fallback_encoding) - return s - - -class ErrorReturnCode(Exception): - truncate_cap = 750 - - def __init__(self, full_cmd, stdout, stderr): - self.full_cmd = full_cmd - self.stdout = stdout - self.stderr = stderr - - def truncate(output, name): - if not self.truncate_cap: - return output - truncated_output = output[:self.truncate_cap] - delta = len(output) - len(truncated_output) - if delta: - truncated_output += ( - "... 
(%d more, please see e.%s)" % (delta, name)).encode() - return truncated_output - - if self.stdout is None: exc_stdout = "" - else: - exc_stdout = truncate(self.stdout, 'stdout') - - if self.stderr is None: exc_stderr = "" - else: - exc_stderr = truncate(self.stderr, 'stderr') - - msg = "\n\n RAN: %r\n\n STDOUT:\n%s\n\n STDERR:\n%s" % \ - (full_cmd, exc_stdout.decode(DEFAULT_ENCODING, "replace"), - exc_stderr.decode(DEFAULT_ENCODING, "replace")) - super(ErrorReturnCode, self).__init__(msg) - - -class SignalException(ErrorReturnCode): pass - -SIGNALS_THAT_SHOULD_THROW_EXCEPTION = ( - signal.SIGKILL, - signal.SIGSEGV, - signal.SIGTERM, - signal.SIGINT, - signal.SIGQUIT -) - - -# we subclass AttributeError because: -# https://github.com/ipython/ipython/issues/2577 -# https://github.com/amoffat/sh/issues/97#issuecomment-10610629 -class CommandNotFound(AttributeError): pass - -rc_exc_regex = re.compile("(ErrorReturnCode|SignalException)_(\d+)") -rc_exc_cache = {} - -def get_rc_exc(rc): - rc = int(rc) - try: return rc_exc_cache[rc] - except KeyError: pass - - if rc > 0: - name = "ErrorReturnCode_%d" % rc - exc = type(name, (ErrorReturnCode,), {"exit_code": rc}) - else: - name = "SignalException_%d" % abs(rc) - exc = type(name, (SignalException,), {"exit_code": rc}) - - rc_exc_cache[rc] = exc - return exc - - - - -def which(program): - def is_exe(fpath): - return os.path.exists(fpath) and os.access(fpath, os.X_OK) - - fpath, fname = os.path.split(program) - if fpath: - if is_exe(program): return program - else: - if "PATH" not in os.environ: return None - for path in os.environ["PATH"].split(os.pathsep): - exe_file = os.path.join(path, program) - if is_exe(exe_file): - return exe_file - - return None - -def resolve_program(program): - path = which(program) - if not path: - # our actual command might have a dash in it, but we can't call - # that from python (we have to use underscores), so we'll check - # if a dash version of our underscore command exists and use that - # 
if it does - if "_" in program: path = which(program.replace("_", "-")) - if not path: return None - return path - - -# we add this thin wrapper to glob.glob because of a specific edge case where -# glob does not expand to anything. for example, if you try to do -# glob.glob("*.py") and there are no *.py files in the directory, glob.glob -# returns an empty list. this empty list gets passed to the command, and -# then the command fails with a misleading error message. this thin wrapper -# ensures that if there is no expansion, we pass in the original argument, -# so that when the command fails, the error message is clearer -def glob(arg): - return original_glob(arg) or arg - - - -class Logger(object): - def __init__(self, name, context=None): - self.name = name - self.context = "%s" - if context: self.context = "%s: %%s" % context - self.log = logging.getLogger(name) - - def info(self, msg, *args): - if not logging_enabled: return - self.log.info(self.context, msg % args) - - def debug(self, msg, *args): - if not logging_enabled: return - self.log.debug(self.context, msg % args) - - def error(self, msg, *args): - if not logging_enabled: return - self.log.error(self.context, msg % args) - - def exception(self, msg, *args): - if not logging_enabled: return - self.log.exception(self.context, msg % args) - - - -class RunningCommand(object): - def __init__(self, cmd, call_args, stdin, stdout, stderr): - truncate = 20 - if len(cmd) > truncate: - logger_str = "command %r...(%d more) call_args %r" % \ - (cmd[:truncate], len(cmd) - truncate, call_args) - else: - logger_str = "command %r call_args %r" % (cmd, call_args) - - self.log = Logger("command", logger_str) - self.call_args = call_args - self.cmd = cmd - - # self.ran is used for auditing what actually ran. 
for example, in - # exceptions, or if you just want to know what was ran after the - # command ran - if IS_PY3: - self.ran = " ".join([arg.decode(DEFAULT_ENCODING, "ignore") for arg in cmd]) - else: - self.ran = " ".join(cmd) - - self.process = None - - # this flag is for whether or not we've handled the exit code (like - # by raising an exception). this is necessary because .wait() is called - # from multiple places, and wait() triggers the exit code to be - # processed. but we don't want to raise multiple exceptions, only - # one (if any at all) - self._handled_exit_code = False - - self.should_wait = True - spawn_process = True - - - # with contexts shouldn't run at all yet, they prepend - # to every command in the context - if call_args["with"]: - spawn_process = False - Command._prepend_stack.append(self) - - - if callable(call_args["out"]) or callable(call_args["err"]): - self.should_wait = False - - if call_args["piped"] or call_args["iter"] or call_args["iter_noblock"]: - self.should_wait = False - - # we're running in the background, return self and let us lazily - # evaluate - if call_args["bg"]: self.should_wait = False - - # redirection - if call_args["err_to_out"]: stderr = STDOUT - - - # set up which stream should write to the pipe - # TODO, make pipe None by default and limit the size of the Queue - # in oproc.OProc - pipe = STDOUT - if call_args["iter"] == "out" or call_args["iter"] is True: pipe = STDOUT - elif call_args["iter"] == "err": pipe = STDERR - - if call_args["iter_noblock"] == "out" or call_args["iter_noblock"] is True: pipe = STDOUT - elif call_args["iter_noblock"] == "err": pipe = STDERR - - - if spawn_process: - self.log.debug("starting process") - self.process = OProc(cmd, stdin, stdout, stderr, - self.call_args, pipe=pipe) - - if self.should_wait: - self.wait() - - - def wait(self): - self._handle_exit_code(self.process.wait()) - return self - - # here we determine if we had an exception, or an error code that we weren't - # 
expecting to see. if we did, we create and raise an exception - def _handle_exit_code(self, code): - if self._handled_exit_code: return - self._handled_exit_code = True - - if code not in self.call_args["ok_code"] and \ - (code > 0 or -code in SIGNALS_THAT_SHOULD_THROW_EXCEPTION): - raise get_rc_exc(code)( - self.ran, - self.process.stdout, - self.process.stderr - ) - - - - @property - def stdout(self): - self.wait() - return self.process.stdout - - @property - def stderr(self): - self.wait() - return self.process.stderr - - @property - def exit_code(self): - self.wait() - return self.process.exit_code - - @property - def pid(self): - return self.process.pid - - def __len__(self): - return len(str(self)) - - def __enter__(self): - # we don't actually do anything here because anything that should - # have been done would have been done in the Command.__call__ call. - # essentially all that has to happen is the comand be pushed on - # the prepend stack. - pass - - def __iter__(self): - return self - - def next(self): - # we do this because if get blocks, we can't catch a KeyboardInterrupt - # so the slight timeout allows for that. 
- while True: - try: chunk = self.process._pipe_queue.get(True, 0.001) - except Empty: - if self.call_args["iter_noblock"]: return errno.EWOULDBLOCK - else: - if chunk is None: - self.wait() - raise StopIteration() - try: return chunk.decode(self.call_args["encoding"], - self.call_args["decode_errors"]) - except UnicodeDecodeError: return chunk - - # python 3 - __next__ = next - - def __exit__(self, typ, value, traceback): - if self.call_args["with"] and Command._prepend_stack: - Command._prepend_stack.pop() - - def __str__(self): - if IS_PY3: return self.__unicode__() - else: return unicode(self).encode(self.call_args["encoding"]) - - def __unicode__(self): - if self.process and self.stdout: - return self.stdout.decode(self.call_args["encoding"], - self.call_args["decode_errors"]) - return "" - - def __eq__(self, other): - return unicode(self) == unicode(other) - - def __contains__(self, item): - return item in str(self) - - def __getattr__(self, p): - # let these three attributes pass through to the OProc object - if p in ("signal", "terminate", "kill"): - if self.process: return getattr(self.process, p) - else: raise AttributeError - return getattr(unicode(self), p) - - def __repr__(self): - try: return str(self) - except UnicodeDecodeError: - if self.process: - if self.stdout: return repr(self.stdout) - return repr("") - - def __long__(self): - return long(str(self).strip()) - - def __float__(self): - return float(str(self).strip()) - - def __int__(self): - return int(str(self).strip()) - - - - - -class Command(object): - _prepend_stack = [] - - _call_args = { - # currently unsupported - #"fg": False, # run command in foreground - - "bg": False, # run command in background - "with": False, # prepend the command to every command after it - "in": None, - "out": None, # redirect STDOUT - "err": None, # redirect STDERR - "err_to_out": None, # redirect STDERR to STDOUT - - # stdin buffer size - # 1 for line, 0 for unbuffered, any other number for that amount - 
"in_bufsize": 0, - # stdout buffer size, same values as above - "out_bufsize": 1, - "err_bufsize": 1, - - # this is how big the output buffers will be for stdout and stderr. - # this is essentially how much output they will store from the process. - # we use a deque, so if it overflows past this amount, the first items - # get pushed off as each new item gets added. - # - # NOTICE - # this is not a *BYTE* size, this is a *CHUNK* size...meaning, that if - # you're buffering out/err at 1024 bytes, the internal buffer size will - # be "internal_bufsize" CHUNKS of 1024 bytes - "internal_bufsize": 3 * 1024 ** 2, - - "env": None, - "piped": None, - "iter": None, - "iter_noblock": None, - "ok_code": 0, - "cwd": None, - - # the separator delimiting between a long-argument's name and its value - # for example, --arg=derp, '=' is the long_sep - "long_sep": "=", - - # this is for programs that expect their input to be from a terminal. - # ssh is one of those programs - "tty_in": False, - "tty_out": True, - - "encoding": DEFAULT_ENCODING, - "decode_errors": "strict", - - # how long the process should run before it is auto-killed - "timeout": 0, - - # these control whether or not stdout/err will get aggregated together - # as the process runs. this has memory usage implications, so sometimes - # with long-running processes with a lot of data, it makes sense to - # set these to true - "no_out": False, - "no_err": False, - "no_pipe": False, - - # if any redirection is used for stdout or stderr, internal buffering - # of that data is not stored. 
this forces it to be stored, as if - # the output is being T'd to both the redirected destination and our - # internal buffers - "tee": None, - } - - # these are arguments that cannot be called together, because they wouldn't - # make any sense - _incompatible_call_args = ( - #("fg", "bg", "Command can't be run in the foreground and background"), - ("err", "err_to_out", "Stderr is already being redirected"), - ("piped", "iter", "You cannot iterate when this command is being piped"), - ) - - - # this method exists because of the need to have some way of letting - # manual object instantiation not perform the underscore-to-dash command - # conversion that resolve_program uses. - # - # there are 2 ways to create a Command object. using sh.Command() - # or by using sh.. the method fed into sh.Command must be taken - # literally, and so no underscore-dash conversion is performed. the one - # for sh. must do the underscore-dash converesion, because we - # can't type dashes in method names - @classmethod - def _create(cls, program, **default_kwargs): - path = resolve_program(program) - if not path: raise CommandNotFound(program) - - cmd = cls(path) - if default_kwargs: - cmd = cmd.bake(**default_kwargs) - - return cmd - - - def __init__(self, path): - path = which(path) - if not path: - raise CommandNotFound(path) - self._path = path - - self._partial = False - self._partial_baked_args = [] - self._partial_call_args = {} - - # bugfix for functools.wraps. 
issue #121 - self.__name__ = repr(self) - - - def __getattribute__(self, name): - # convenience - getattr = partial(object.__getattribute__, self) - - if name.startswith("_"): return getattr(name) - if name == "bake": return getattr("bake") - if name.endswith("_"): name = name[:-1] - - return getattr("bake")(name) - - - @staticmethod - def _extract_call_args(kwargs, to_override={}): - kwargs = kwargs.copy() - call_args = {} - for parg, default in Command._call_args.items(): - key = "_" + parg - - if key in kwargs: - call_args[parg] = kwargs[key] - del kwargs[key] - elif parg in to_override: - call_args[parg] = to_override[parg] - - # test for incompatible call args - s1 = set(call_args.keys()) - for args in Command._incompatible_call_args: - args = list(args) - error = args.pop() - - if s1.issuperset(args): - raise TypeError("Invalid special arguments %r: %s" % (args, error)) - - return call_args, kwargs - - - def _aggregate_keywords(self, keywords, sep, raw=False): - processed = [] - for k, v in keywords.items(): - # we're passing a short arg as a kwarg, example: - # cut(d="\t") - if len(k) == 1: - if v is not False: - processed.append(encode_to_py3bytes_or_py2str("-" + k)) - if v is not True: - processed.append(encode_to_py3bytes_or_py2str(v)) - - # we're doing a long arg - else: - if not raw: - k = k.replace("_", "-") - - if v is True: - processed.append(encode_to_py3bytes_or_py2str("--" + k)) - elif v is False: - pass - else: - arg = encode_to_py3bytes_or_py2str("--%s%s%s" % (k, sep, v)) - processed.append(arg) - return processed - - - def _compile_args(self, args, kwargs, sep): - processed_args = [] - - # aggregate positional args - for arg in args: - if isinstance(arg, (list, tuple)): - if not arg: - warnings.warn("Empty list passed as an argument to %r. \ -If you're using glob.glob(), please use sh.glob() instead." 
% self._path, stacklevel=3) - for sub_arg in arg: - processed_args.append(encode_to_py3bytes_or_py2str(sub_arg)) - elif isinstance(arg, dict): - processed_args += self._aggregate_keywords(arg, sep, raw=True) - else: - processed_args.append(encode_to_py3bytes_or_py2str(arg)) - - # aggregate the keyword arguments - processed_args += self._aggregate_keywords(kwargs, sep) - - return processed_args - - - # TODO needs documentation - def bake(self, *args, **kwargs): - fn = Command(self._path) - fn._partial = True - - call_args, kwargs = self._extract_call_args(kwargs) - - pruned_call_args = call_args - for k, v in Command._call_args.items(): - try: - if pruned_call_args[k] == v: - del pruned_call_args[k] - except KeyError: continue - - fn._partial_call_args.update(self._partial_call_args) - fn._partial_call_args.update(pruned_call_args) - fn._partial_baked_args.extend(self._partial_baked_args) - sep = pruned_call_args.get("long_sep", self._call_args["long_sep"]) - fn._partial_baked_args.extend(self._compile_args(args, kwargs, sep)) - return fn - - def __str__(self): - if IS_PY3: - return self.__unicode__() - else: - return unicode(self).encode(DEFAULT_ENCODING) - - - def __eq__(self, other): - try: return str(self) == str(other) - except: return False - - - def __repr__(self): - return "" % str(self) - - - def __unicode__(self): - baked_args = " ".join(self._partial_baked_args) - if baked_args: - baked_args = " " + baked_args - return self._path + baked_args - - def __enter__(self): - self(_with=True) - - def __exit__(self, typ, value, traceback): - Command._prepend_stack.pop() - - - def __call__(self, *args, **kwargs): - kwargs = kwargs.copy() - args = list(args) - - cmd = [] - - # aggregate any 'with' contexts - call_args = Command._call_args.copy() - for prepend in self._prepend_stack: - # don't pass the 'with' call arg - pcall_args = prepend.call_args.copy() - try: del pcall_args["with"] - except: pass - - call_args.update(pcall_args) - cmd.extend(prepend.cmd) - - if 
IS_PY3: - cmd.append(bytes(self._path, call_args["encoding"])) - else: - cmd.append(self._path) - - # here we extract the special kwargs and override any - # special kwargs from the possibly baked command - tmp_call_args, kwargs = self._extract_call_args(kwargs, self._partial_call_args) - call_args.update(tmp_call_args) - - if not isinstance(call_args["ok_code"], (tuple, list)): - call_args["ok_code"] = [call_args["ok_code"]] - - - # check if we're piping via composition - stdin = call_args["in"] - if args: - first_arg = args.pop(0) - if isinstance(first_arg, RunningCommand): - # it makes sense that if the input pipe of a command is running - # in the background, then this command should run in the - # background as well - if first_arg.call_args["bg"]: call_args["bg"] = True - stdin = first_arg.process._pipe_queue - - else: - args.insert(0, first_arg) - - processed_args = self._compile_args(args, kwargs, call_args["long_sep"]) - - # makes sure our arguments are broken up correctly - split_args = self._partial_baked_args + processed_args - - final_args = split_args - - cmd.extend(final_args) - - - # stdout redirection - stdout = call_args["out"] - if stdout \ - and not callable(stdout) \ - and not hasattr(stdout, "write") \ - and not isinstance(stdout, (cStringIO, StringIO)): - - stdout = open(str(stdout), "wb") - - - # stderr redirection - stderr = call_args["err"] - if stderr and not callable(stderr) and not hasattr(stderr, "write") \ - and not isinstance(stderr, (cStringIO, StringIO)): - stderr = open(str(stderr), "wb") - - - return RunningCommand(cmd, call_args, stdin, stdout, stderr) - - - - -# used in redirecting -STDOUT = -1 -STDERR = -2 - - - -# Process open = Popen -# Open Process = OProc -class OProc(object): - _procs_to_cleanup = set() - _registered_cleanup = False - _default_window_size = (24, 80) - - def __init__(self, cmd, stdin, stdout, stderr, call_args, - persist=True, pipe=STDOUT): - - self.call_args = call_args - - self._single_tty = 
self.call_args["tty_in"] and self.call_args["tty_out"] - - # this logic is a little convoluted, but basically this top-level - # if/else is for consolidating input and output TTYs into a single - # TTY. this is the only way some secure programs like ssh will - # output correctly (is if stdout and stdin are both the same TTY) - if self._single_tty: - self._stdin_fd, self._slave_stdin_fd = pty.openpty() - - self._stdout_fd = self._stdin_fd - self._slave_stdout_fd = self._slave_stdin_fd - - self._stderr_fd = self._stdin_fd - self._slave_stderr_fd = self._slave_stdin_fd - - # do not consolidate stdin and stdout - else: - if self.call_args["tty_in"]: - self._slave_stdin_fd, self._stdin_fd = pty.openpty() - else: - self._slave_stdin_fd, self._stdin_fd = os.pipe() - - # tty_out is usually the default - if self.call_args["tty_out"]: - self._stdout_fd, self._slave_stdout_fd = pty.openpty() - else: - self._stdout_fd, self._slave_stdout_fd = os.pipe() - - # unless STDERR is going to STDOUT, it ALWAYS needs to be a pipe, - # and never a PTY. the reason for this is not totally clear to me, - # but it has to do with the fact that if STDERR isn't set as the - # CTTY (because STDOUT is), the STDERR buffer won't always flush - # by the time the process exits, and the data will be lost. - # i've only seen this on OSX. - if stderr is not STDOUT: - self._stderr_fd, self._slave_stderr_fd = os.pipe() - - gc_enabled = gc.isenabled() - if gc_enabled: gc.disable() - self.pid = os.fork() - - - # child - if self.pid == 0: - # ignoring SIGHUP lets us persist even after the parent process - # exits - signal.signal(signal.SIGHUP, signal.SIG_IGN) - - # this piece of ugliness is due to a bug where we can lose output - # if we do os.close(self._slave_stdout_fd) in the parent after - # the child starts writing. 
- # see http://bugs.python.org/issue15898 - if IS_OSX: - _time.sleep(0.01) - - os.setsid() - - if self.call_args["tty_out"]: - # set raw mode, so there isn't any weird translation of newlines - # to \r\n and other oddities. we're not outputting to a terminal - # anyways - # - # we HAVE to do this here, and not in the parent thread, because - # we have to guarantee that this is set before the child process - # is run, and we can't do it twice. - tty.setraw(self._stdout_fd) - - - os.close(self._stdin_fd) - if not self._single_tty: - os.close(self._stdout_fd) - if stderr is not STDOUT: os.close(self._stderr_fd) - - - if self.call_args["cwd"]: os.chdir(self.call_args["cwd"]) - os.dup2(self._slave_stdin_fd, 0) - os.dup2(self._slave_stdout_fd, 1) - - # we're not directing stderr to stdout? then set self._slave_stderr_fd to - # fd 2, the common stderr fd - if stderr is STDOUT: os.dup2(self._slave_stdout_fd, 2) - else: os.dup2(self._slave_stderr_fd, 2) - - # don't inherit file descriptors - max_fd = resource.getrlimit(resource.RLIMIT_NOFILE)[0] - os.closerange(3, max_fd) - - - # set our controlling terminal - if self.call_args["tty_out"]: - tmp_fd = os.open(os.ttyname(1), os.O_RDWR) - os.close(tmp_fd) - - - if self.call_args["tty_out"]: - self.setwinsize(1) - - # actually execute the process - if self.call_args["env"] is None: - os.execv(cmd[0], cmd) - else: - os.execve(cmd[0], cmd, self.call_args["env"]) - - os._exit(255) - - # parent - else: - if gc_enabled: gc.enable() - - if not OProc._registered_cleanup: - atexit.register(OProc._cleanup_procs) - OProc._registered_cleanup = True - - - self.started = _time.time() - self.cmd = cmd - self.exit_code = None - - self.stdin = stdin or Queue() - self._pipe_queue = Queue() - - # this is used to prevent a race condition when we're waiting for - # a process to end, and the OProc's internal threads are also checking - # for the processes's end - self._wait_lock = threading.Lock() - - # these are for aggregating the stdout and 
stderr. we use a deque - # because we don't want to overflow - self._stdout = deque(maxlen=self.call_args["internal_bufsize"]) - self._stderr = deque(maxlen=self.call_args["internal_bufsize"]) - - if self.call_args["tty_in"]: self.setwinsize(self._stdin_fd) - - - self.log = Logger("process", repr(self)) - - os.close(self._slave_stdin_fd) - if not self._single_tty: - os.close(self._slave_stdout_fd) - if stderr is not STDOUT: os.close(self._slave_stderr_fd) - - self.log.debug("started process") - if not persist: - OProc._procs_to_cleanup.add(self) - - - if self.call_args["tty_in"]: - attr = termios.tcgetattr(self._stdin_fd) - attr[3] &= ~termios.ECHO - termios.tcsetattr(self._stdin_fd, termios.TCSANOW, attr) - - # this represents the connection from a Queue object (or whatever - # we're using to feed STDIN) to the process's STDIN fd - self._stdin_stream = StreamWriter("stdin", self, self._stdin_fd, - self.stdin, self.call_args["in_bufsize"]) - - - stdout_pipe = None - if pipe is STDOUT and not self.call_args["no_pipe"]: - stdout_pipe = self._pipe_queue - - # this represents the connection from a process's STDOUT fd to - # wherever it has to go, sometimes a pipe Queue (that we will use - # to pipe data to other processes), and also an internal deque - # that we use to aggregate all the output - save_stdout = not self.call_args["no_out"] and \ - (self.call_args["tee"] in (True, "out") or stdout is None) - self._stdout_stream = StreamReader("stdout", self, self._stdout_fd, stdout, - self._stdout, self.call_args["out_bufsize"], stdout_pipe, - save_data=save_stdout) - - - if stderr is STDOUT or self._single_tty: self._stderr_stream = None - else: - stderr_pipe = None - if pipe is STDERR and not self.call_args["no_pipe"]: - stderr_pipe = self._pipe_queue - - save_stderr = not self.call_args["no_err"] and \ - (self.call_args["tee"] in ("err",) or stderr is None) - self._stderr_stream = StreamReader("stderr", self, self._stderr_fd, stderr, - self._stderr, 
self.call_args["err_bufsize"], stderr_pipe, - save_data=save_stderr) - - # start the main io threads - self._input_thread = self._start_thread(self.input_thread, self._stdin_stream) - self._output_thread = self._start_thread(self.output_thread, self._stdout_stream, self._stderr_stream) - - - def __repr__(self): - return "" % (self.pid, self.cmd[:500]) - - - # also borrowed from pexpect.py - @staticmethod - def setwinsize(fd): - rows, cols = OProc._default_window_size - TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561) - if TIOCSWINSZ == 2148037735: # L is not required in Python >= 2.2. - TIOCSWINSZ = -2146929561 # Same bits, but with sign. - - s = struct.pack('HHHH', rows, cols, 0, 0) - fcntl.ioctl(fd, TIOCSWINSZ, s) - - - @staticmethod - def _start_thread(fn, *args): - thrd = threading.Thread(target=fn, args=args) - thrd.daemon = True - thrd.start() - return thrd - - def in_bufsize(self, buf): - self._stdin_stream.stream_bufferer.change_buffering(buf) - - def out_bufsize(self, buf): - self._stdout_stream.stream_bufferer.change_buffering(buf) - - def err_bufsize(self, buf): - if self._stderr_stream: - self._stderr_stream.stream_bufferer.change_buffering(buf) - - - def input_thread(self, stdin): - done = False - while not done and self.alive: - self.log.debug("%r ready for more input", stdin) - done = stdin.write() - - stdin.close() - - - def output_thread(self, stdout, stderr): - readers = [] - errors = [] - - if stdout is not None: - readers.append(stdout) - errors.append(stdout) - if stderr is not None: - readers.append(stderr) - errors.append(stderr) - - while readers: - outputs, inputs, err = select.select(readers, [], errors, 0.1) - - # stdout and stderr - for stream in outputs: - self.log.debug("%r ready to be read from", stream) - done = stream.read() - if done: readers.remove(stream) - - for stream in err: - pass - - # test if the process has been running too long - if self.call_args["timeout"]: - now = _time.time() - if now - self.started > 
self.call_args["timeout"]: - self.log.debug("we've been running too long") - self.kill() - - - # this is here because stdout may be the controlling TTY, and - # we can't close it until the process has ended, otherwise the - # child will get SIGHUP. typically, if we've broken out of - # the above loop, and we're here, the process is just about to - # end, so it's probably ok to aggressively poll self.alive - # - # the other option to this would be to do the CTTY close from - # the method that does the actual os.waitpid() call, but the - # problem with that is that the above loop might still be - # running, and closing the fd will cause some operation to - # fail. this is less complex than wrapping all the ops - # in the above loop with out-of-band fd-close exceptions - while self.alive: - _time.sleep(0.001) - - if stdout: - stdout.close() - - if stderr: - stderr.close() - - - @property - def stdout(self): - return "".encode(self.call_args["encoding"]).join(self._stdout) - - @property - def stderr(self): - return "".encode(self.call_args["encoding"]).join(self._stderr) - - - def signal(self, sig): - self.log.debug("sending signal %d", sig) - try: os.kill(self.pid, sig) - except OSError: pass - - def kill(self): - self.log.debug("killing") - self.signal(signal.SIGKILL) - - def terminate(self): - self.log.debug("terminating") - self.signal(signal.SIGTERM) - - @staticmethod - def _cleanup_procs(): - for proc in OProc._procs_to_cleanup: - proc.kill() - - - def _handle_exit_code(self, exit_code): - # if we exited from a signal, let our exit code reflect that - if os.WIFSIGNALED(exit_code): return -os.WTERMSIG(exit_code) - # otherwise just give us a normal exit code - elif os.WIFEXITED(exit_code): return os.WEXITSTATUS(exit_code) - else: raise RuntimeError("Unknown child exit status!") - - @property - def alive(self): - if self.exit_code is not None: return False - - # what we're doing here essentially is making sure that the main thread - # (or another thread), isn't 
calling .wait() on the process. because - # .wait() calls os.waitpid(self.pid, 0), we can't do an os.waitpid - # here...because if we did, and the process exited while in this - # thread, the main thread's os.waitpid(self.pid, 0) would raise OSError - # (because the process ended in another thread). - # - # so essentially what we're doing is, using this lock, checking if - # we're calling .wait(), and if we are, let .wait() get the exit code - # and handle the status, otherwise let us do it. - acquired = self._wait_lock.acquire(False) - if not acquired: - if self.exit_code is not None: return False - return True - - try: - # WNOHANG is just that...we're calling waitpid without hanging... - # essentially polling the process - pid, exit_code = os.waitpid(self.pid, os.WNOHANG) - if pid == self.pid: - self.exit_code = self._handle_exit_code(exit_code) - return False - - # no child process - except OSError: return False - else: return True - finally: self._wait_lock.release() - - - def wait(self): - self.log.debug("acquiring wait lock to wait for completion") - with self._wait_lock: - self.log.debug("got wait lock") - - if self.exit_code is None: - self.log.debug("exit code not set, waiting on pid") - pid, exit_code = os.waitpid(self.pid, 0) - self.exit_code = self._handle_exit_code(exit_code) - else: - self.log.debug("exit code already set (%d), no need to wait", self.exit_code) - - self._input_thread.join() - self._output_thread.join() - - OProc._procs_to_cleanup.discard(self) - - return self.exit_code - - - - -class DoneReadingStdin(Exception): pass -class NoStdinData(Exception): pass - - - -# this guy is for reading from some input (the stream) and writing to our -# opened process's stdin fd. 
the stream can be a Queue, a callable, something -# with the "read" method, a string, or an iterable -class StreamWriter(object): - def __init__(self, name, process, stream, stdin, bufsize): - self.name = name - self.process = weakref.ref(process) - self.stream = stream - self.stdin = stdin - - self.log = Logger("streamwriter", repr(self)) - - - self.stream_bufferer = StreamBufferer(self.process().call_args["encoding"], - bufsize) - - # determine buffering for reading from the input we set for stdin - if bufsize == 1: self.bufsize = 1024 - elif bufsize == 0: self.bufsize = 1 - else: self.bufsize = bufsize - - - if isinstance(stdin, Queue): - log_msg = "queue" - self.get_chunk = self.get_queue_chunk - - elif callable(stdin): - log_msg = "callable" - self.get_chunk = self.get_callable_chunk - - # also handles stringio - elif hasattr(stdin, "read"): - log_msg = "file descriptor" - self.get_chunk = self.get_file_chunk - - elif isinstance(stdin, basestring): - log_msg = "string" - - if bufsize == 1: - # TODO, make the split() be a generator - self.stdin = iter((c + "\n" for c in stdin.split("\n"))) - else: - self.stdin = iter(stdin[i:i + self.bufsize] for i in range(0, len(stdin), self.bufsize)) - self.get_chunk = self.get_iter_chunk - - else: - log_msg = "general iterable" - self.stdin = iter(stdin) - self.get_chunk = self.get_iter_chunk - - self.log.debug("parsed stdin as a %s", log_msg) - - - def __repr__(self): - return "" % (self.name, self.process()) - - def fileno(self): - return self.stream - - def get_queue_chunk(self): - try: chunk = self.stdin.get(True, 0.01) - except Empty: raise NoStdinData - if chunk is None: raise DoneReadingStdin - return chunk - - def get_callable_chunk(self): - try: return self.stdin() - except: raise DoneReadingStdin - - def get_iter_chunk(self): - try: - if IS_PY3: return self.stdin.__next__() - else: return self.stdin.next() - except StopIteration: raise DoneReadingStdin - - def get_file_chunk(self): - if self.stream_bufferer.type 
== 1: chunk = self.stdin.readline() - else: chunk = self.stdin.read(self.bufsize) - if not chunk: raise DoneReadingStdin - else: return chunk - - - # the return value answers the questions "are we done writing forever?" - def write(self): - # get_chunk may sometimes return bytes, and sometimes returns trings - # because of the nature of the different types of STDIN objects we - # support - try: chunk = self.get_chunk() - except DoneReadingStdin: - self.log.debug("done reading") - - if self.process().call_args["tty_in"]: - # EOF time - try: char = termios.tcgetattr(self.stream)[6][termios.VEOF] - except: char = chr(4).encode() - os.write(self.stream, char) - - return True - - except NoStdinData: - self.log.debug("received no data") - return False - - # if we're not bytes, make us bytes - if IS_PY3 and hasattr(chunk, "encode"): - chunk = chunk.encode(self.process().call_args["encoding"]) - - for chunk in self.stream_bufferer.process(chunk): - self.log.debug("got chunk size %d: %r", len(chunk), chunk[:30]) - - self.log.debug("writing chunk to process") - try: - os.write(self.stream, chunk) - except OSError: - self.log.debug("OSError writing stdin chunk") - return True - - - def close(self): - self.log.debug("closing, but flushing first") - chunk = self.stream_bufferer.flush() - self.log.debug("got chunk size %d to flush: %r", len(chunk), chunk[:30]) - try: - if chunk: os.write(self.stream, chunk) - if not self.process().call_args["tty_in"]: - self.log.debug("we used a TTY, so closing the stream") - os.close(self.stream) - except OSError: pass - - - -class StreamReader(object): - def __init__(self, name, process, stream, handler, buffer, bufsize, - pipe_queue=None, save_data=True): - self.name = name - self.process = weakref.ref(process) - self.stream = stream - self.buffer = buffer - self.save_data = save_data - self.encoding = process.call_args["encoding"] - self.decode_errors = process.call_args["decode_errors"] - - self.pipe_queue = None - if pipe_queue: 
self.pipe_queue = weakref.ref(pipe_queue) - - self.log = Logger("streamreader", repr(self)) - - self.stream_bufferer = StreamBufferer(self.encoding, bufsize, - self.decode_errors) - - # determine buffering - if bufsize == 1: self.bufsize = 1024 - elif bufsize == 0: self.bufsize = 1 - else: self.bufsize = bufsize - - - # here we're determining the handler type by doing some basic checks - # on the handler object - self.handler = handler - if callable(handler): self.handler_type = "fn" - elif isinstance(handler, StringIO): self.handler_type = "stringio" - elif isinstance(handler, cStringIO): - self.handler_type = "cstringio" - elif hasattr(handler, "write"): self.handler_type = "fd" - else: self.handler_type = None - - - self.should_quit = False - - # here we choose how to call the callback, depending on how many - # arguments it takes. the reason for this is to make it as easy as - # possible for people to use, without limiting them. a new user will - # assume the callback takes 1 argument (the data). 
as they get more - # advanced, they may want to terminate the process, or pass some stdin - # back, and will realize that they can pass a callback of more args - if self.handler_type == "fn": - implied_arg = 0 - if inspect.ismethod(handler): - implied_arg = 1 - num_args = len(inspect.getargspec(handler).args) - - else: - if inspect.isfunction(handler): - num_args = len(inspect.getargspec(handler).args) - - # is an object instance with __call__ method - else: - implied_arg = 1 - num_args = len(inspect.getargspec(handler.__call__).args) - - - self.handler_args = () - if num_args == implied_arg + 2: - self.handler_args = (self.process().stdin,) - elif num_args == implied_arg + 3: - self.handler_args = (self.process().stdin, self.process) - - - def fileno(self): - return self.stream - - def __repr__(self): - return "" % (self.name, self.process()) - - def close(self): - chunk = self.stream_bufferer.flush() - self.log.debug("got chunk size %d to flush: %r", - len(chunk), chunk[:30]) - if chunk: self.write_chunk(chunk) - - if self.handler_type == "fd" and hasattr(self.handler, "close"): - self.handler.flush() - - if self.pipe_queue and self.save_data: self.pipe_queue().put(None) - try: os.close(self.stream) - except OSError: pass - - - def write_chunk(self, chunk): - # in PY3, the chunk coming in will be bytes, so keep that in mind - - if self.handler_type == "fn" and not self.should_quit: - # try to use the encoding first, if that doesn't work, send - # the bytes, because it might be binary - try: - to_handler = chunk.decode(self.encoding, self.decode_errors) - except UnicodeDecodeError: - to_handler = chunk - - # this is really ugly, but we can't store self.process as one of - # the handler args in self.handler_args, the reason being is that - # it would create cyclic references, and prevent objects from - # being garbage collected. 
so we're determining if this handler - # even requires self.process (by the argument count), and if it - # does, resolving the weakref to a hard reference and passing - # that into the handler - handler_args = self.handler_args - if len(self.handler_args) == 2: - handler_args = (self.handler_args[0], self.process()) - self.should_quit = self.handler(to_handler, *handler_args) - - elif self.handler_type == "stringio": - self.handler.write(chunk.decode(self.encoding, self.decode_errors)) - - elif self.handler_type in ("cstringio", "fd"): - self.handler.write(chunk) - - # we should flush on an fd. chunk is already the correctly-buffered - # size, so we don't need the fd buffering as well - self.handler.flush() - - if self.save_data: - self.buffer.append(chunk) - - if self.pipe_queue: - self.log.debug("putting chunk onto pipe: %r", chunk[:30]) - self.pipe_queue().put(chunk) - - - def read(self): - # if we're PY3, we're reading bytes, otherwise we're reading - # str - try: chunk = os.read(self.stream, self.bufsize) - except OSError as e: - self.log.debug("got errno %d, done reading", e.errno) - return True - if not chunk: - self.log.debug("got no chunk, done reading") - return True - - self.log.debug("got chunk size %d: %r", len(chunk), chunk[:30]) - for chunk in self.stream_bufferer.process(chunk): - self.write_chunk(chunk) - - - - -# this is used for feeding in chunks of stdout/stderr, and breaking it up into -# chunks that will actually be put into the internal buffers. 
for example, if -# you have two processes, one being piped to the other, and you want that, -# first process to feed lines of data (instead of the chunks however they -# come in), OProc will use an instance of this class to chop up the data and -# feed it as lines to be sent down the pipe -class StreamBufferer(object): - def __init__(self, encoding=DEFAULT_ENCODING, buffer_type=1, - decode_errors="strict"): - # 0 for unbuffered, 1 for line, everything else for that amount - self.type = buffer_type - self.buffer = [] - self.n_buffer_count = 0 - self.encoding = encoding - self.decode_errors = decode_errors - - # this is for if we change buffering types. if we change from line - # buffered to unbuffered, its very possible that our self.buffer list - # has data that was being saved up (while we searched for a newline). - # we need to use that up, so we don't lose it - self._use_up_buffer_first = False - - # the buffering lock is used because we might chance the buffering - # types from a different thread. for example, if we have a stdout - # callback, we might use it to change the way stdin buffers. 
so we - # lock - self._buffering_lock = threading.RLock() - self.log = Logger("stream_bufferer") - - - def change_buffering(self, new_type): - # TODO, when we stop supporting 2.6, make this a with context - self.log.debug("acquiring buffering lock for changing buffering") - self._buffering_lock.acquire() - self.log.debug("got buffering lock for changing buffering") - try: - if new_type == 0: self._use_up_buffer_first = True - - self.type = new_type - finally: - self._buffering_lock.release() - self.log.debug("released buffering lock for changing buffering") - - - def process(self, chunk): - # MAKE SURE THAT THE INPUT IS PY3 BYTES - # THE OUTPUT IS ALWAYS PY3 BYTES - - # TODO, when we stop supporting 2.6, make this a with context - self.log.debug("acquiring buffering lock to process chunk (buffering: %d)", self.type) - self._buffering_lock.acquire() - self.log.debug("got buffering lock to process chunk (buffering: %d)", self.type) - try: - # we've encountered binary, permanently switch to N size buffering - # since matching on newline doesn't make sense anymore - if self.type == 1: - try: chunk.decode(self.encoding, self.decode_errors) - except: - self.log.debug("detected binary data, changing buffering") - self.change_buffering(1024) - - # unbuffered - if self.type == 0: - if self._use_up_buffer_first: - self._use_up_buffer_first = False - to_write = self.buffer - self.buffer = [] - to_write.append(chunk) - return to_write - - return [chunk] - - # line buffered - elif self.type == 1: - total_to_write = [] - chunk = chunk.decode(self.encoding, self.decode_errors) - while True: - newline = chunk.find("\n") - if newline == -1: break - - chunk_to_write = chunk[:newline + 1] - if self.buffer: - # this is ugly, but it's designed to take the existing - # bytes buffer, join it together, tack on our latest - # chunk, then convert the whole thing to a string. - # it's necessary, i'm sure. read the whole block to - # see why. 
- chunk_to_write = "".encode(self.encoding).join(self.buffer) \ - + chunk_to_write.encode(self.encoding) - chunk_to_write = chunk_to_write.decode(self.encoding) - - self.buffer = [] - self.n_buffer_count = 0 - - chunk = chunk[newline + 1:] - total_to_write.append(chunk_to_write.encode(self.encoding)) - - if chunk: - self.buffer.append(chunk.encode(self.encoding)) - self.n_buffer_count += len(chunk) - return total_to_write - - # N size buffered - else: - total_to_write = [] - while True: - overage = self.n_buffer_count + len(chunk) - self.type - if overage >= 0: - ret = "".encode(self.encoding).join(self.buffer) + chunk - chunk_to_write = ret[:self.type] - chunk = ret[self.type:] - total_to_write.append(chunk_to_write) - self.buffer = [] - self.n_buffer_count = 0 - else: - self.buffer.append(chunk) - self.n_buffer_count += len(chunk) - break - return total_to_write - finally: - self._buffering_lock.release() - self.log.debug("released buffering lock for processing chunk (buffering: %d)", self.type) - - - def flush(self): - self.log.debug("acquiring buffering lock for flushing buffer") - self._buffering_lock.acquire() - self.log.debug("got buffering lock for flushing buffer") - try: - ret = "".encode(self.encoding).join(self.buffer) - self.buffer = [] - return ret - finally: - self._buffering_lock.release() - self.log.debug("released buffering lock for flushing buffer") - - - - - -# this allows lookups to names that aren't found in the global scope to be -# searched for as a program name. for example, if "ls" isn't found in this -# module's scope, we consider it a system program and try to find it. 
-# -# we use a dict instead of just a regular object as the base class because -# the exec() statement used in this file requires the "globals" argument to -# be a dictionary -class Environment(dict): - def __init__(self, globs, baked_args={}): - self.globs = globs - self.baked_args = baked_args - - def __setitem__(self, k, v): - self.globs[k] = v - - def __getitem__(self, k): - try: return self.globs[k] - except KeyError: pass - - # the only way we'd get to here is if we've tried to - # import * from a repl. so, raise an exception, since - # that's really the only sensible thing to do - if k == "__all__": - raise ImportError("Cannot import * from sh. \ -Please import sh or import programs individually.") - - # if we end with "_" just go ahead and skip searching - # our namespace for python stuff. this was mainly for the - # command "id", which is a popular program for finding - # if a user exists, but also a python function for getting - # the address of an object. so can call the python - # version by "id" and the program version with "id_" - if not k.endswith("_"): - # check if we're naming a dynamically generated ReturnCode exception - try: return rc_exc_cache[k] - except KeyError: - m = rc_exc_regex.match(k) - if m: - exit_code = int(m.group(2)) - if m.group(1) == "SignalException": - exit_code = -exit_code - return get_rc_exc(exit_code) - - # is it a builtin? - try: - return getattr(self["__builtins__"], k) - except AttributeError: - pass - elif not k.startswith("_"): - k = k.rstrip("_") - - - # https://github.com/ipython/ipython/issues/2577 - # https://github.com/amoffat/sh/issues/97#issuecomment-10610629 - if k.startswith("__") and k.endswith("__"): - raise AttributeError - - # how about an environment variable? - try: - return os.environ[k] - except KeyError: - pass - - # is it a custom builtin? 
- builtin = getattr(self, "b_" + k, None) - if builtin: return builtin - - # it must be a command then - # we use _create instead of instantiating the class directly because - # _create uses resolve_program, which will automatically do underscore- - # to-dash conversions. instantiating directly does not use that - return Command._create(k, **self.baked_args) - - - # methods that begin with "b_" are custom builtins and will override any - # program that exists in our path. this is useful for things like - # common shell builtins that people are used to, but which aren't actually - # full-fledged system binaries - - def b_cd(self, path): - os.chdir(path) - - def b_which(self, program): - return which(program) - - - - -def run_repl(env): - banner = "\n>> sh v{version}\n>> https://github.com/amoffat/sh\n" - - print(banner.format(version=__version__)) - while True: - try: line = raw_input("sh> ") - except (ValueError, EOFError): break - - try: exec(compile(line, "", "single"), env, env) - except SystemExit: break - except: print(traceback.format_exc()) - - # cleans up our last line - print("") - - - - -# this is a thin wrapper around THIS module (we patch sys.modules[__name__]). -# this is in the case that the user does a "from sh import whatever" -# in other words, they only want to import certain programs, not the whole -# system PATH worth of commands. in this case, we just proxy the -# import lookup to our Environment class -class SelfWrapper(ModuleType): - def __init__(self, self_module, baked_args={}): - # this is super ugly to have to copy attributes like this, - # but it seems to be the only way to make reload() behave - # nicely. if i make these attributes dynamic lookups in - # __getattr__, reload sometimes chokes in weird ways... - for attr in ["__builtins__", "__doc__", "__name__", "__package__"]: - setattr(self, attr, getattr(self_module, attr, None)) - - # python 3.2 (2.7 and 3.3 work fine) breaks on osx (not ubuntu) - # if we set this to None. 
and 3.3 needs a value for __path__ - self.__path__ = [] - self.self_module = self_module - self.env = Environment(globals(), baked_args) - - def __setattr__(self, name, value): - if hasattr(self, "env"): self.env[name] = value - ModuleType.__setattr__(self, name, value) - - def __getattr__(self, name): - if name == "env": raise AttributeError - return self.env[name] - - # accept special keywords argument to define defaults for all operations - # that will be processed with given by return SelfWrapper - def __call__(self, **kwargs): - return SelfWrapper(self.self_module, kwargs) - - - - -# we're being run as a stand-alone script -if __name__ == "__main__": - try: - arg = sys.argv.pop(1) - except: - arg = None - - if arg == "test": - import subprocess - - def run_test(version, locale): - py_version = "python%s" % version - py_bin = which(py_version) - - if py_bin: - print("Testing %s, locale %r" % (py_version.capitalize(), - locale)) - - env = os.environ.copy() - env["LC_ALL"] = locale - p = subprocess.Popen([py_bin, os.path.join(THIS_DIR, "test.py")] - + sys.argv[1:], env=env) - p.wait() - else: - print("Couldn't find %s, skipping" % py_version.capitalize()) - - versions = ("2.6", "2.7", "3.1", "3.2", "3.3") - locales = ("en_US.UTF-8", "C") - for locale in locales: - for version in versions: - run_test(version, locale) - - else: - env = Environment(globals()) - run_repl(env) - -# we're being imported from somewhere -else: - self = sys.modules[__name__] - sys.modules[__name__] = SelfWrapper(self) diff --git a/src/rezplugins/release_hook/command.py b/src/rezplugins/release_hook/command.py index c1f9dd2b2..d8f8ceb04 100644 --- a/src/rezplugins/release_hook/command.py +++ b/src/rezplugins/release_hook/command.py @@ -1,6 +1,12 @@ """ Executes pre- and post-release shell commands """ + +import getpass +import sys +import os +from subprocess import Popen, PIPE, STDOUT + from rez.release_hook import ReleaseHook from rez.exceptions import ReleaseHookCancellingError from 
rez.config import config @@ -9,15 +15,7 @@ from rez.utils.scope import scoped_formatter from rez.utils.formatting import expandvars from rez.vendor.schema.schema import Schema, Or, Optional, Use, And -import platform -if "windows" in platform.system().lower(): - from rez.vendor.pbs import Command, ErrorReturnCode, which - sudo = None -else: - from rez.vendor.sh.sh import Command, ErrorReturnCode, sudo, which -import getpass -import sys -import os +from rez.util import which class CommandReleaseHook(ReleaseHook): @@ -56,18 +54,18 @@ def _err(msg): kwargs = {} if env: - kwargs["_env"] = env - - def _execute(cmd, arguments): - try: - result = cmd(*(arguments or []), **kwargs) - if self.settings.print_output: - print result.stdout.strip() - except ErrorReturnCode as e: - # `e` shows the command that was run - msg = "command failed:\n%s" % str(e) + kwargs["env"] = env + + def _execute(commands): + process = Popen(commands, stdout=PIPE, stderr=STDOUT, **kwargs) + stdout, _ = process.communicate() + + if process.returncode != 0: + msg = "command failed:\n%s" % stdout _err(msg) return False + if self.settings.print_output: + print stdout.strip() return True if not os.path.isfile(cmd_name): @@ -79,14 +77,14 @@ def _execute(cmd, arguments): _err(msg) return False - run_cmd = Command(cmd_full_path) - if user == 'root' and sudo is not None: - with sudo: - return _execute(run_cmd, cmd_arguments) + cmds = [cmd_full_path] + (cmd_arguments or []) + if user == 'root': + cmds = ['sudo'] + cmds + return _execute(cmds) elif user and user != getpass.getuser(): raise NotImplementedError # TODO else: - return _execute(run_cmd, cmd_arguments) + return _execute(cmds) def pre_build(self, user, install_path, variants=None, **kwargs): errors = [] From a7a520adca379f62346194ffa0ac146469987394 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 15:01:02 +1100 Subject: [PATCH 101/124] testing travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.travis.yml b/.travis.yml index 9bf1263ad..7be2789cf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,4 +11,4 @@ install: script: # Running rez tests - - "../rez_install/bin/rez/rez-selftest" + - "../rez_install/bin/rez/rez-selftest -s bash" From b10cb5a1306b10c3b537f03211e9c193dbf731fd Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 15:08:21 +1100 Subject: [PATCH 102/124] added travis badge to README --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index af2529f32..3f2ec3605 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +[![Build Status](https://travis-ci.org/nerdvegas/rez.svg?branch=master)](https://travis-ci.org/nerdvegas/rez) + ![logo](media/rez_banner_256.png) - [What Is Rez?](#what-is-rez) From 492f920ff3cbc71a51bcf21bc653fa1e2c33ac31 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 15:28:09 +1100 Subject: [PATCH 103/124] travis testing --- .travis.yml | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7be2789cf..3388d0016 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,17 @@ language: python -python: - - "2.7" +matrix: + include: + - python: 2.7 + os: linux + sudo: required + dist: trusty + env: _REZ_SHELL=bash install: - # Adding tcsh - - "sudo apt-get install tcsh" - # Installing rez + - "if [ "$_REZ_SHELL" == "tcsh" ]; then sudo apt-get install tcsh; fi" - "python ./install.py ../rez_install" script: # Running rez tests - - "../rez_install/bin/rez/rez-selftest -s bash" + - "../rez_install/bin/rez/rez-selftest -s $_REZ_SHELL" From 17e2ac89eb31ae231faaffd3d5444fb648fc6bde Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 15:37:34 +1100 Subject: [PATCH 104/124] travis testing --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3388d0016..eda360398 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,8 +4,9 @@ matrix: include: - python: 2.7 
os: linux - sudo: required - dist: trusty + env: _REZ_SHELL=bash + - python: 2.7 + os: osx env: _REZ_SHELL=bash install: From e9a0092722694d626bd03efce2fc1a07e2ba777e Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 15:38:46 +1100 Subject: [PATCH 105/124] fixed travis yaml syntax err --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index eda360398..b709c94d0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,9 +10,9 @@ matrix: env: _REZ_SHELL=bash install: - - "if [ "$_REZ_SHELL" == "tcsh" ]; then sudo apt-get install tcsh; fi" - - "python ./install.py ../rez_install" + - 'if [ "$_REZ_SHELL" == "tcsh" ]; then sudo apt-get install tcsh; fi' + - 'python ./install.py ../rez_install' script: # Running rez tests - - "../rez_install/bin/rez/rez-selftest -s $_REZ_SHELL" + - '../rez_install/bin/rez/rez-selftest -s $_REZ_SHELL' From 54b6fdd3cb4ef220f93d1a82fab72a428a1b87e5 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 28 Nov 2017 15:53:07 +1100 Subject: [PATCH 106/124] testing travis --- .travis.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index b709c94d0..285491ff2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,14 +5,10 @@ matrix: - python: 2.7 os: linux env: _REZ_SHELL=bash - - python: 2.7 - os: osx - env: _REZ_SHELL=bash install: - 'if [ "$_REZ_SHELL" == "tcsh" ]; then sudo apt-get install tcsh; fi' - 'python ./install.py ../rez_install' script: - # Running rez tests - '../rez_install/bin/rez/rez-selftest -s $_REZ_SHELL' From efb1c3d729a92057eb6ff39d679d7bf77c801d9b Mon Sep 17 00:00:00 2001 From: Brendan Abel Date: Tue, 28 Nov 2017 11:28:17 -0800 Subject: [PATCH 107/124] Updated tests to use subprocess instead of sh not that sh is removed --- src/rez/tests/test_shells.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/src/rez/tests/test_shells.py b/src/rez/tests/test_shells.py index e2805f26e..bded5a5bf 100644 --- 
a/src/rez/tests/test_shells.py +++ b/src/rez/tests/test_shells.py @@ -146,23 +146,16 @@ def test_rcfile(self): @shell_dependent(exclude=["cmd"]) @install_dependent def test_rez_env_output(self): - # TODO: this test does not run on Windows using the CMD shell as it - # does not accept commands from stdin. Rather than explicitly skipping - # the test (via the decorator) perhaps we should check for startup - # capabilities as the other tests do. - from rez.vendor.sh import sh - # here we are making sure that running a command via rez-env prints - # exactly what we expect. We use 'sh' because subprocess strips special - # characters such as color codes - we want to ensure that the output - # EXACTLY matches the output of the command being run. + # exactly what we expect. echo_cmd = which("echo") if not echo_cmd: print "\nskipping test, 'echo' command not found." return - cmd = sh.Command(os.path.join(system.rez_bin_path, "rez-env")) - sh_out = cmd(["--", "echo", "hey"]) + cmd = [os.path.join(system.rez_bin_path, "rez-env"), "--", "echo", "hey"] + process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + sh_out, _ = process.communicate() out = str(sh_out).strip() self.assertEqual(out, "hey") From 5510f2929f8192d27e5f719919a159b547ae847c Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 29 Nov 2017 13:59:53 +1100 Subject: [PATCH 108/124] -added subprocess.Popen wrapper -ported to wrapper --- src/rez/bind/_utils.py | 4 +- src/rez/developer_package.py | 2 +- src/rez/package_help.py | 7 ++- src/rez/package_py_utils.py | 13 +++-- src/rez/pip.py | 5 +- src/rez/release_vcs.py | 6 ++- src/rez/rex.py | 9 ++-- src/rez/serialise.py | 2 +- src/rez/shells.py | 4 +- src/rez/utils/_version.py | 2 +- src/rez/utils/graph_utils.py | 3 +- src/rez/utils/platform_.py | 22 +++++---- src/rez/utils/syspath.py | 15 ------ src/rez/utils/system.py | 30 ++++++++++++ src/rezplugins/shell/cmd.py | 93 +++++++++++++++++++++++++----------- 
src/rezplugins/shell/csh.py | 5 +- src/rezplugins/shell/sh.py | 5 +- 17 files changed, 144 insertions(+), 83 deletions(-) delete mode 100644 src/rez/utils/syspath.py create mode 100644 src/rez/utils/system.py diff --git a/src/rez/bind/_utils.py b/src/rez/bind/_utils.py index bcbdc19de..fb75c1bb0 100644 --- a/src/rez/bind/_utils.py +++ b/src/rez/bind/_utils.py @@ -6,6 +6,7 @@ from rez.exceptions import RezBindError from rez.config import config from rez.util import which +from rez.utils.system import popen from rez.utils.logging_ import print_debug from pipes import quote import subprocess @@ -117,8 +118,7 @@ def _run_command(args): cmd_str = ' '.join(quote(x) for x in args) log("running: %s" % cmd_str) - p = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + p = popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() return stdout, stderr, p.returncode diff --git a/src/rez/developer_package.py b/src/rez/developer_package.py index 307dbb591..f00a5f700 100644 --- a/src/rez/developer_package.py +++ b/src/rez/developer_package.py @@ -3,7 +3,7 @@ from rez.serialise import load_from_file, FileFormat from rez.packages_ import create_package from rez.exceptions import PackageMetadataError, InvalidPackageError -from rez.utils.syspath import add_sys_paths +from rez.utils.system import add_sys_paths from rez.utils.sourcecode import SourceCode from rez.utils.logging_ import print_info, print_error from inspect import isfunction diff --git a/src/rez/package_help.py b/src/rez/package_help.py index 7082b7e03..b89dced2c 100644 --- a/src/rez/package_help.py +++ b/src/rez/package_help.py @@ -1,6 +1,7 @@ from rez.packages_ import iter_packages from rez.config import config from rez.rex_bindings import VersionBinding +from rez.utils.system import popen from rez.utils.backcompat import convert_old_command_expansions from rez.utils.scope import scoped_formatter from rez.system import system @@ -92,7 +93,8 @@ def open(self, 
section_index=0): else: if self._verbose: print "running command: %s" % uri - subprocess.Popen(uri, shell=True).wait() + p = popen(uri, shell=True) + p.wait() def print_info(self, buf=None): """Print help sections.""" @@ -112,7 +114,8 @@ def _open_url(cls, url): cmd = [config.browser, url] if not config.quiet: print "running command: %s" % " ".join(cmd) - subprocess.Popen(cmd).communicate() + p = popen(cmd) + p.communicate() else: if not config.quiet: print "opening URL in browser: %s" % url diff --git a/src/rez/package_py_utils.py b/src/rez/package_py_utils.py index 538f5bed5..703f95458 100644 --- a/src/rez/package_py_utils.py +++ b/src/rez/package_py_utils.py @@ -7,7 +7,7 @@ """ # these imports just forward the symbols into this module's namespace -from rez.utils.sourcecode import late +from rez.utils.system import popen from rez.exceptions import InvalidPackageError @@ -165,7 +165,7 @@ def exec_command(attr, cmd): """ import subprocess - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p = popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode: @@ -192,9 +192,8 @@ def exec_python(attr, src, executable="python"): if isinstance(src, basestring): src = [src] - p = subprocess.Popen( - [executable, "-c", "; ".join(src)], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p = popen([executable, "-c", "; ".join(src)], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode: @@ -233,8 +232,8 @@ def find_site_python(module_name, paths=None): py_cmd = 'import {x}; print {x}.__path__'.format(x=module_name) - p = subprocess.Popen(["python", "-c", py_cmd], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + p = popen(["python", "-c", py_cmd], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode: diff --git a/src/rez/pip.py b/src/rez/pip.py index 0fbd81e60..0bd192efb 100644 --- a/src/rez/pip.py +++ b/src/rez/pip.py @@ -6,6 
+6,7 @@ from rez.vendor.distlib.util import parse_name_and_version from rez.vendor.enum.enum import Enum from rez.resolved_context import ResolvedContext +from rez.system import popen from rez.utils.logging_ import print_debug, print_info, print_warning from rez.exceptions import BuildError, PackageFamilyNotFoundError, \ PackageNotFoundError, convert_errors @@ -97,7 +98,7 @@ def run_pip_command(command_args, pip_version=None, python_version=None): command = [pip_exe] + list(command_args) if context is None: - return subprocess.Popen(command) + return popen(command) else: return context.execute_shell(command=command, block=False) @@ -349,7 +350,7 @@ def _cmd(context, command): _log("running: %s" % cmd_str) if context is None: - p = subprocess.Popen(command) + p = popen(command) else: p = context.execute_shell(command=command, block=False) diff --git a/src/rez/release_vcs.py b/src/rez/release_vcs.py index e4bb85efa..798786382 100644 --- a/src/rez/release_vcs.py +++ b/src/rez/release_vcs.py @@ -1,6 +1,7 @@ from rez.exceptions import ReleaseVCSError from rez.packages_ import get_developer_package from rez.util import which +from rez.utils.system import popen from rez.utils.logging_ import print_debug from rez.utils.filesystem import walk_up_dirs import subprocess @@ -204,9 +205,10 @@ def _cmd(self, *nargs): if self.package.config.debug("package_release"): print_debug("Running command: %s" % cmd_str) - p = subprocess.Popen(nargs, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=self.pkg_root) + p = popen(nargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + cwd=self.pkg_root) out, err = p.communicate() + if p.returncode: print_debug("command stdout:") print_debug(out) diff --git a/src/rez/rex.py b/src/rez/rex.py index d50ba1291..af7724622 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -12,6 +12,7 @@ from rez.exceptions import RexError, RexUndefinedVariableError, RezSystemError from rez.util import shlex_join from rez.utils import reraise +from rez.system 
import popen from rez.utils.sourcecode import SourceCode, SourceCodeError from rez.utils.data_utils import AttrDictWrapper from rez.utils.formatting import expandvars @@ -612,10 +613,10 @@ def subprocess(self, args, **subproc_kwargs): self.target_environ.update(self.manager.environ) shell_mode = not hasattr(args, '__iter__') - return subprocess.Popen(args, - shell=shell_mode, - env=self.target_environ, - **subproc_kwargs) + return popen(args, + shell=shell_mode, + env=self.target_environ, + **subproc_kwargs) def command(self, value): if self.passive: diff --git a/src/rez/serialise.py b/src/rez/serialise.py index 87eee73bd..523d166cb 100644 --- a/src/rez/serialise.py +++ b/src/rez/serialise.py @@ -8,7 +8,7 @@ from rez.utils.filesystem import TempDirs from rez.exceptions import ResourceError, InvalidPackageError from rez.utils.memcached import memcached -from rez.utils.syspath import add_sys_paths +from rez.utils.system import add_sys_paths from rez.config import config from rez.vendor.enum import Enum from rez.vendor import yaml diff --git a/src/rez/shells.py b/src/rez/shells.py index bc5ec00b4..6a6d291cb 100644 --- a/src/rez/shells.py +++ b/src/rez/shells.py @@ -7,7 +7,7 @@ from rez.exceptions import RezSystemError from rez.rex import EscapedString from rez.config import config -from rez.system import system +from rez.system import system, popen import subprocess import os.path import pipes @@ -345,7 +345,7 @@ def _create_ex(): cmd.extend([self.executable, target_file]) try: - p = subprocess.Popen(cmd, env=env, **Popen_args) + p = popen(cmd, env=env, **Popen_args) except Exception as e: cmd_str = ' '.join(map(pipes.quote, cmd)) raise RezSystemError("Error running command:\n%s\n%s" diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index f4a88ca91..f2f0deda5 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. Do not place anything else in this file. 
-_rez_version = "2.15.0" +_rez_version = "2.16.0" try: from rez.vendor.version.version import Version diff --git a/src/rez/utils/graph_utils.py b/src/rez/utils/graph_utils.py index 8e3c4dec8..444c18398 100644 --- a/src/rez/utils/graph_utils.py +++ b/src/rez/utils/graph_utils.py @@ -9,6 +9,7 @@ from ast import literal_eval from rez.config import config from rez.vendor.pydot import pydot +from rez.system import popen from rez.utils.formatting import PackageRequest from rez.exceptions import PackageRequestError from rez.vendor.pygraph.readwrite.dot import read as read_dot @@ -242,7 +243,7 @@ def view_graph(graph_str, dest_file=None): print "loading image viewer (%s)..." % prog if config.image_viewer: - proc = subprocess.Popen((config.image_viewer, dest_file)) + proc = popen([config.image_viewer, dest_file]) proc.wait() viewed = not bool(proc.returncode) diff --git a/src/rez/utils/platform_.py b/src/rez/utils/platform_.py index d42d16629..24fe02804 100644 --- a/src/rez/utils/platform_.py +++ b/src/rez/utils/platform_.py @@ -4,6 +4,7 @@ import os.path import re from rez.util import which +from rez.utils.system import popen from rez.utils.data_utils import cached_property from rez.utils.platform_mapped import platform_mapped from rez.exceptions import RezSystemError @@ -219,9 +220,11 @@ def _parse(txt, distributor_key, release_key): # next, try getting the output of the lsb_release program import subprocess - p = subprocess.Popen(['/usr/bin/env', 'lsb_release', '-a'], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + p = popen(['/usr/bin/env', 'lsb_release', '-a'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) txt = p.communicate()[0] + if not p.returncode: distributor_, release_ = _parse(txt, "Distributor ID:", @@ -230,6 +233,7 @@ def _parse(txt, distributor_key, release_key): distributor = distributor_ if release_ and not release: release = release_ + result = _os() if result: return result @@ -240,6 +244,7 @@ def _parse(txt, distributor_key, release_key): 
distributor_, release_, _ = platform.linux_distribution() except: distributor_, release_, _ = platform.dist() + if distributor_ and not distributor: distributor = distributor_ if release_ and not release: @@ -339,8 +344,7 @@ def _physical_cores_from_cpuinfo(self): def _physical_cores_from_lscpu(self): import subprocess try: - p = subprocess.Popen(['lscpu'], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + p = popen(['lscpu'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) except (OSError, IOError): return None @@ -407,9 +411,8 @@ def _editor(self): def _physical_cores_from_osx_sysctl(self): import subprocess try: - p = subprocess.Popen(['sysctl', '-n', 'hw.physicalcpu'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + p = popen(['sysctl', '-n', 'hw.physicalcpu'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) except (OSError, IOError): return None @@ -488,9 +491,8 @@ def _physical_cores_from_wmic(self): # windows import subprocess try: - p = subprocess.Popen('wmic cpu get NumberOfCores /value'.split(), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + p = popen('wmic cpu get NumberOfCores /value'.split(), + stdout=subprocess.PIPE, stderr=subprocess.PIPE) except (OSError, IOError): return None diff --git a/src/rez/utils/syspath.py b/src/rez/utils/syspath.py deleted file mode 100644 index 88f00a6bd..000000000 --- a/src/rez/utils/syspath.py +++ /dev/null @@ -1,15 +0,0 @@ -from contextlib import contextmanager -import sys - - -@contextmanager -def add_sys_paths(paths): - """Add to sys.path, and revert on scope exit. 
- """ - original_syspath = sys.path[:] - sys.path.extend(paths) - - try: - yield - finally: - sys.path = original_syspath diff --git a/src/rez/utils/system.py b/src/rez/utils/system.py new file mode 100644 index 000000000..c44b3487c --- /dev/null +++ b/src/rez/utils/system.py @@ -0,0 +1,30 @@ +from contextlib import contextmanager +import subprocess +import sys + + +@contextmanager +def add_sys_paths(paths): + """Add to sys.path, and revert on scope exit. + """ + original_syspath = sys.path[:] + sys.path.extend(paths) + + try: + yield + finally: + sys.path = original_syspath + + +def popen(args, **kwargs): + """Wrapper for `subprocess.Popen`. + + Avoids python bug described here: https://bugs.python.org/issue3905. This + can arise when apps (maya) install a non-standard stdin handler. + """ + + # avoid non-standard stdin handler + if "stdin" not in kwargs and sys.stdin.fileno() not in (0, 1, 2): + kwargs["stdin"] = subprocess.PIPE + + return subprocess.Popen(args, **kwargs) diff --git a/src/rezplugins/shell/cmd.py b/src/rezplugins/shell/cmd.py index 2d9d406a6..00a759cf6 100644 --- a/src/rezplugins/shell/cmd.py +++ b/src/rezplugins/shell/cmd.py @@ -5,6 +5,7 @@ from rez.rex import RexExecutor, literal, OutputStyle from rez.shells import Shell from rez.system import system +from rez.utils.system import popen from rez.utils.platform_ import platform_ from rez.util import shlex_join import os @@ -61,33 +62,66 @@ def get_startup_sequence(cls, rcfile, norc, stdin, command): @classmethod def get_syspaths(cls): - if not cls.syspaths: - paths = [] - - cmd = ["REG", "QUERY", "HKLM\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment", "/v", "PATH"] - expected = "\r\nHKEY_LOCAL_MACHINE\\\\SYSTEM\\\\CurrentControlSet\\\\Control\\\\Session Manager\\\\Environment\r\n PATH REG_(EXPAND_)?SZ (.*)\r\n\r\n" - - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, shell=True) - out_, _ = p.communicate() - - if p.returncode == 0: - match = 
re.match(expected, out_) - if match: - paths.extend(match.group(2).split(os.pathsep)) - - cmd = ["REG", "QUERY", "HKCU\\Environment", "/v", "PATH"] - expected = "\r\nHKEY_CURRENT_USER\\\\Environment\r\n PATH REG_(EXPAND_)?SZ (.*)\r\n\r\n" - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, shell=True) - out_, _ = p.communicate() - - if p.returncode == 0: - match = re.match(expected, out_) - if match: - paths.extend(match.group(2).split(os.pathsep)) - - cls.syspaths = set([x for x in paths if x]) + if cls.syspaths is not None: + return cls.syspaths + + def gen_expected_regex(parts): + whitespace = "[\s]+" + return whitespace.join(parts) + + paths = [] + + cmd = [ + "REG", + "QUERY", + "HKLM\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment", + "/v", + "PATH" + ] + + expected = gen_expected_regex([ + "HKEY_LOCAL_MACHINE\\\\SYSTEM\\\\CurrentControlSet\\\\Control\\\\Session Manager\\\\Environment", + "PATH", + "REG_(EXPAND_)?SZ", + "(.*)" + ]) + + p = popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, shell=True) + out_, _ = p.communicate() + out_ = out_.strip() + + if p.returncode == 0: + match = re.match(expected, out_) + if match: + paths.extend(match.group(2).split(os.pathsep)) + + cmd = [ + "REG", + "QUERY", + "HKCU\\Environment", + "/v", + "PATH" + ] + + expected = gen_expected_regex([ + "HKEY_CURRENT_USER\\\\Environment", + "PATH", + "REG_(EXPAND_)?SZ", + "(.*)" + ]) + + p = popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, shell=True) + out_, _ = p.communicate() + out_ = out_.strip() + + if p.returncode == 0: + match = re.match(expected, out_) + if match: + paths.extend(match.group(2).split(os.pathsep)) + + cls.syspaths = set([x for x in paths if x]) return cls.syspaths def _bind_interactive_rez(self): @@ -169,8 +203,9 @@ def _create_ex(): cmd_flags = ['/Q', '/K'] cmd = cmd + [self.executable] + cmd_flags + ['call {}'.format(target_file)] - is_detached = cmd[0] == 'START' - p = subprocess.Popen(cmd, 
env=env, shell=is_detached, **Popen_args) + is_detached = (cmd[0] == 'START') + + p = popen(cmd, env=env, shell=is_detached, **Popen_args) return p def get_output(self, style=OutputStyle.file): diff --git a/src/rezplugins/shell/csh.py b/src/rezplugins/shell/csh.py index 8592f0edf..d70853f21 100644 --- a/src/rezplugins/shell/csh.py +++ b/src/rezplugins/shell/csh.py @@ -5,6 +5,7 @@ import os.path import subprocess from rez.config import config +from rez.utils.system import popen from rez.utils.platform_ import platform_ from rez.shells import Shell, UnixShell from rez.rex import EscapedString @@ -36,8 +37,8 @@ def get_syspaths(cls): if not cls.syspaths: cmd = "cmd=`which %s`; unset PATH; $cmd %s 'echo __PATHS_ $PATH'" \ % (cls.name(), cls.command_arg) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, shell=True) + p = popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, shell=True) out_, err_ = p.communicate() if p.returncode: paths = [] diff --git a/src/rezplugins/shell/sh.py b/src/rezplugins/shell/sh.py index 7fc10ac31..721c2d5ab 100644 --- a/src/rezplugins/shell/sh.py +++ b/src/rezplugins/shell/sh.py @@ -6,6 +6,7 @@ import pipes import subprocess from rez.config import config +from rez.utils.system import popen from rez.utils.platform_ import platform_ from rez.shells import Shell, UnixShell from rez.rex import EscapedString @@ -36,8 +37,8 @@ def get_syspaths(cls): if not cls.syspaths: cmd = "cmd=`which %s`; unset PATH; $cmd %s %s 'echo __PATHS_ $PATH'" \ % (cls.name(), cls.norc_arg, cls.command_arg) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, shell=True) + p = popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, shell=True) out_, err_ = p.communicate() if p.returncode: paths = [] From 067c6842828199d3f216800e01ea3b3aa6c3b034 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 29 Nov 2017 14:29:01 +1100 Subject: [PATCH 109/124] misc bugs --- src/rez/pip.py | 2 +- src/rez/release_vcs.py | 4 
+++- src/rez/rex.py | 2 +- src/rez/shells.py | 3 ++- src/rez/utils/graph_utils.py | 2 +- 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/rez/pip.py b/src/rez/pip.py index 0bd192efb..014e586e9 100644 --- a/src/rez/pip.py +++ b/src/rez/pip.py @@ -6,7 +6,7 @@ from rez.vendor.distlib.util import parse_name_and_version from rez.vendor.enum.enum import Enum from rez.resolved_context import ResolvedContext -from rez.system import popen +from rez.utils.system import popen from rez.utils.logging_ import print_debug, print_info, print_warning from rez.exceptions import BuildError, PackageFamilyNotFoundError, \ PackageNotFoundError, convert_errors diff --git a/src/rez/release_vcs.py b/src/rez/release_vcs.py index 798786382..994c161b7 100644 --- a/src/rez/release_vcs.py +++ b/src/rez/release_vcs.py @@ -4,6 +4,7 @@ from rez.utils.system import popen from rez.utils.logging_ import print_debug from rez.utils.filesystem import walk_up_dirs +from pipes import quote import subprocess @@ -201,7 +202,8 @@ def export(cls, revision, path): def _cmd(self, *nargs): """Convenience function for executing a program such as 'git' etc.""" - cmd_str = ' '.join(nargs) + cmd_str = ' '.join(map(quote, nargs)) + if self.package.config.debug("package_release"): print_debug("Running command: %s" % cmd_str) diff --git a/src/rez/rex.py b/src/rez/rex.py index af7724622..cfb3ecd00 100644 --- a/src/rez/rex.py +++ b/src/rez/rex.py @@ -12,7 +12,7 @@ from rez.exceptions import RexError, RexUndefinedVariableError, RezSystemError from rez.util import shlex_join from rez.utils import reraise -from rez.system import popen +from rez.utils.system import popen from rez.utils.sourcecode import SourceCode, SourceCodeError from rez.utils.data_utils import AttrDictWrapper from rez.utils.formatting import expandvars diff --git a/src/rez/shells.py b/src/rez/shells.py index 6a6d291cb..e54af8105 100644 --- a/src/rez/shells.py +++ b/src/rez/shells.py @@ -4,10 +4,11 @@ from rez.rex import RexExecutor, 
ActionInterpreter, OutputStyle from rez.util import which, shlex_join from rez.utils.logging_ import print_warning +from rez.utils.system import popen +from rez.system import system from rez.exceptions import RezSystemError from rez.rex import EscapedString from rez.config import config -from rez.system import system, popen import subprocess import os.path import pipes diff --git a/src/rez/utils/graph_utils.py b/src/rez/utils/graph_utils.py index 444c18398..35d6d6efa 100644 --- a/src/rez/utils/graph_utils.py +++ b/src/rez/utils/graph_utils.py @@ -9,7 +9,7 @@ from ast import literal_eval from rez.config import config from rez.vendor.pydot import pydot -from rez.system import popen +from rez.utils.system import popen from rez.utils.formatting import PackageRequest from rez.exceptions import PackageRequestError from rez.vendor.pygraph.readwrite.dot import read as read_dot From 91f4ea8abd1329feeb2c0f5e2ffe3315da5c97cc Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 29 Nov 2017 15:07:51 +1100 Subject: [PATCH 110/124] added WIKI.md --- WIKI.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 WIKI.md diff --git a/WIKI.md b/WIKI.md new file mode 100644 index 000000000..6e060260e --- /dev/null +++ b/WIKI.md @@ -0,0 +1,10 @@ +# Rez Documentation + +Rez uses the GitHub Wiki system for its documentation. To update: + + ]$ git clone git@github.com:nerdvegas/rez.wiki.git + +Then follow the README.md instructions in that repository. + +Rez is not currently accepting documentation PRs - GutHub doesn't support this +natively and I have to jump through some hoops to support it. 
From 36d78e67dcd53a088b00762dce16ea83d43ddb14 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 29 Nov 2017 16:02:44 +1100 Subject: [PATCH 111/124] added wiki update script --- .gitignore | 1 + WIKI.md | 10 ---------- update-wiki.sh | 28 ++++++++++++++++++++++++++++ 3 files changed, 29 insertions(+), 10 deletions(-) delete mode 100644 WIKI.md create mode 100644 update-wiki.sh diff --git a/.gitignore b/.gitignore index 5f9ad53aa..2d0984bd7 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ dist/ *~ docs/_build .DS_Store +.rez-gen-wiki-tmp diff --git a/WIKI.md b/WIKI.md deleted file mode 100644 index 6e060260e..000000000 --- a/WIKI.md +++ /dev/null @@ -1,10 +0,0 @@ -# Rez Documentation - -Rez uses the GitHub Wiki system for its documentation. To update: - - ]$ git clone git@github.com:nerdvegas/rez.wiki.git - -Then follow the README.md instructions in that repository. - -Rez is not currently accepting documentation PRs - GutHub doesn't support this -natively and I have to jump through some hoops to support it. diff --git a/update-wiki.sh b/update-wiki.sh new file mode 100644 index 000000000..7ab6a206a --- /dev/null +++ b/update-wiki.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# +# This script: +# 1. Takes the content from here: https://github.com/nerdvegas/rez-wiki; +# 2. Then writes it into a local clone of https://github.com/nerdvegas/rez.wiki.git; +# 3. Then follows the procedure outlined in README from 2. +# +# This process exists because GitHub does not support contributions to wiki +# repositories - this is a workaround. 
+# +set -e + +rm -rf .rez-gen-wiki-tmp +mkdir .rez-gen-wiki-tmp +cd .rez-gen-wiki-tmp + +git clone git@github.com:nerdvegas/rez.git +git clone git@github.com:nerdvegas/rez-wiki.git +git clone git@github.com:nerdvegas/rez.wiki.git + +cp ./rez-wiki/pages/* ./rez.wiki/pages/ +export REZ_SOURCE_DIR=$(pwd)/rez + +cd ./rez.wiki +python ./utils/process.py +bash ./utils/update.sh + +rm -rf .rez-gen-wiki-tmp From a3b79478ddee5be304f833c5db8b5910bc5520d8 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 6 Dec 2017 14:20:42 +1100 Subject: [PATCH 112/124] update wiki update script --- update-wiki.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/update-wiki.sh b/update-wiki.sh index 7ab6a206a..b72edbe87 100644 --- a/update-wiki.sh +++ b/update-wiki.sh @@ -18,7 +18,8 @@ git clone git@github.com:nerdvegas/rez.git git clone git@github.com:nerdvegas/rez-wiki.git git clone git@github.com:nerdvegas/rez.wiki.git -cp ./rez-wiki/pages/* ./rez.wiki/pages/ +cp -f ./rez-wiki/pages/* ./rez.wiki/pages/ +cp -rf ./rez-wiki/media/* ./rez.wiki/media/ export REZ_SOURCE_DIR=$(pwd)/rez cd ./rez.wiki From 143cf66e5012c350cc1b194872f574b43b1cb9ab Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 13 Dec 2017 13:18:17 +1100 Subject: [PATCH 113/124] -removed wiki gen script, moved to rez-wiki repo --- .gitignore | 1 - update-wiki.sh | 29 ----------------------------- 2 files changed, 30 deletions(-) delete mode 100644 update-wiki.sh diff --git a/.gitignore b/.gitignore index 2d0984bd7..5f9ad53aa 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,3 @@ dist/ *~ docs/_build .DS_Store -.rez-gen-wiki-tmp diff --git a/update-wiki.sh b/update-wiki.sh deleted file mode 100644 index b72edbe87..000000000 --- a/update-wiki.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# -# This script: -# 1. Takes the content from here: https://github.com/nerdvegas/rez-wiki; -# 2. Then writes it into a local clone of https://github.com/nerdvegas/rez.wiki.git; -# 3. 
Then follows the procedure outlined in README from 2. -# -# This process exists because GitHub does not support contributions to wiki -# repositories - this is a workaround. -# -set -e - -rm -rf .rez-gen-wiki-tmp -mkdir .rez-gen-wiki-tmp -cd .rez-gen-wiki-tmp - -git clone git@github.com:nerdvegas/rez.git -git clone git@github.com:nerdvegas/rez-wiki.git -git clone git@github.com:nerdvegas/rez.wiki.git - -cp -f ./rez-wiki/pages/* ./rez.wiki/pages/ -cp -rf ./rez-wiki/media/* ./rez.wiki/media/ -export REZ_SOURCE_DIR=$(pwd)/rez - -cd ./rez.wiki -python ./utils/process.py -bash ./utils/update.sh - -rm -rf .rez-gen-wiki-tmp From d803816bbb82709a3888b34245377c5fd3e5ded9 Mon Sep 17 00:00:00 2001 From: ajohns Date: Wed, 13 Dec 2017 14:13:55 +1100 Subject: [PATCH 114/124] -merged PR, minor changes --- src/rez/cli/depends.py | 14 +++++++------- src/rez/package_search.py | 17 ++++++----------- src/rez/utils/_version.py | 2 +- 3 files changed, 14 insertions(+), 19 deletions(-) diff --git a/src/rez/cli/depends.py b/src/rez/cli/depends.py index 887d7af6f..8f6f9f994 100644 --- a/src/rez/cli/depends.py +++ b/src/rez/cli/depends.py @@ -8,14 +8,14 @@ def setup_parser(parser, completions=False): "-d", "--depth", type=int, help="dependency tree depth limit") parser.add_argument( - "--paths", type=str, default=None, + "--paths", type=str, help="set package search path") parser.add_argument( - "-b", "--build-requires", action="store_true", default=False, - help="Include build_requires") + "-b", "--build-requires", action="store_true", + help="Include build requirements") parser.add_argument( - "-p", "--private-build-requires", action="store_true", default=False, - help="Include private_build_requires") + "-p", "--private-build-requires", action="store_true", + help="Include private build requirements") parser.add_argument( "-g", "--graph", action="store_true", help="display the dependency tree as an image") @@ -23,13 +23,13 @@ def setup_parser(parser, completions=False): "--pg", 
"--print-graph", dest="print_graph", action="store_true", help="print the dependency tree as a string") parser.add_argument( - "--wg", "--write-graph", dest="write_graph", type=str, metavar='FILE', + "--wg", "--write-graph", dest="write_graph", metavar='FILE', help="write the dependency tree to FILE") parser.add_argument( "-q", "--quiet", action="store_true", help="don't print progress bar or depth indicators") PKG_action = parser.add_argument( - "PKG", type=str, + "PKG", help="package that other packages depend on") if completions: diff --git a/src/rez/package_search.py b/src/rez/package_search.py index 373c93a6a..8ffea8046 100644 --- a/src/rez/package_search.py +++ b/src/rez/package_search.py @@ -55,7 +55,6 @@ def get_reverse_dependency_tree(package_name, depth=None, paths=None, lookup = defaultdict(set) for i, package_name_ in enumerate(package_names): - bar.next() it = iter_packages(name=package_name_, paths=paths) packages = list(it) if not packages: @@ -63,23 +62,19 @@ def get_reverse_dependency_tree(package_name, depth=None, paths=None, pkg = max(packages, key=lambda x: x.version) requires = [] - if not build_requires and not private_build_requires: - requires = pkg.requires or [] - else: - for variant in pkg.iter_variants(): - requires += variant.get_requires(build_requires, private_build_requires) - - requires = set(requires) - for req_list in (pkg.variants or []): - requires.update(req_list) + for variant in pkg.iter_variants(): + requires += variant.get_requires(build_requires, private_build_requires) for req in requires: if not req.conflict: lookup[req.name].add(package_name_) - # perform traversal + bar.next() + bar.finish() + + # perform traversal n = 0 consumed = set([package_name]) working_set = set([package_name]) diff --git a/src/rez/utils/_version.py b/src/rez/utils/_version.py index f2f0deda5..9b695a273 100644 --- a/src/rez/utils/_version.py +++ b/src/rez/utils/_version.py @@ -1,7 +1,7 @@ # Update this value to version up Rez. 
Do not place anything else in this file. -_rez_version = "2.16.0" +_rez_version = "2.17.0" try: from rez.vendor.version.version import Version From e67f8950c0f2811913782b114f568a9a6416e9ab Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 9 Jan 2018 11:27:18 +1100 Subject: [PATCH 115/124] dumb change to trigger travis --- install.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/install.py b/install.py index aa8c20f09..e8dd0b176 100644 --- a/install.py +++ b/install.py @@ -56,7 +56,7 @@ def _get_script_text(self, entry): def patch_rez_binaries(dest_dir): bin_names = os.listdir(bin_path) _, _, _, venv_bin_path = path_locations(dest_dir) - venv_py_executable = which("python", env={"PATH":venv_bin_path, + venv_py_executable = which("python", env={"PATH":venv_bin_path, "PATHEXT":os.environ.get("PATHEXT", "")}) # delete rez bin files written by setuptools @@ -169,3 +169,4 @@ def copy_completion_scripts(dest_dir): print "You may also want to source the relevant completion script from:" print completion_path print + From a661ac7fa602d727ec4f0d99fded2527c7d5fa73 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 9 Jan 2018 11:33:59 +1100 Subject: [PATCH 116/124] -temp rollback to prev travis build env - new env might be using cmake-3 which may be causing test fails --- .travis.yml | 3 +++ install.py | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 285491ff2..168284389 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,3 +12,6 @@ install: script: - '../rez_install/bin/rez/rez-selftest -s $_REZ_SHELL' + +# TEMP +group: deprecated-2017Q4 diff --git a/install.py b/install.py index e8dd0b176..393952679 100644 --- a/install.py +++ b/install.py @@ -169,4 +169,3 @@ def copy_completion_scripts(dest_dir): print "You may also want to source the relevant completion script from:" print completion_path print - From 9beb0fb13bafc9e536697ca33dcc029f2a5af435 Mon Sep 17 00:00:00 2001 From: ajohns Date: Tue, 9 Jan 2018 
12:30:05 +1100 Subject: [PATCH 117/124] -removed code to support install of empty (no build) pkgs -just add new attrib 'build_command=False' to package.py instead if you need this --- .../cmake_files/FindStaticLibs.cmake | 31 --- .../cmake_files/InstallDirs.cmake | 31 --- .../cmake_files/InstallFiles.cmake | 22 --- .../build_system/cmake_files/RezBuild.cmake | 23 --- .../cmake_files/RezFindPackages.cmake | 29 --- .../cmake_files/RezInstallCMake.cmake | 20 -- .../cmake_files/RezInstallContext.cmake | 25 --- .../cmake_files/RezInstallPython.cmake | 26 --- .../cmake_files/RezInstallWrappers.cmake | 176 ------------------ .../build_system/cmake_files/RezProject.cmake | 33 +--- .../build_system/cmake_files/Utils.cmake | 19 -- 11 files changed, 6 insertions(+), 429 deletions(-) delete mode 100644 src/rezplugins/build_system/cmake_files/RezInstallWrappers.cmake diff --git a/src/rezplugins/build_system/cmake_files/FindStaticLibs.cmake b/src/rezplugins/build_system/cmake_files/FindStaticLibs.cmake index be6813849..a4d0dd796 100644 --- a/src/rezplugins/build_system/cmake_files/FindStaticLibs.cmake +++ b/src/rezplugins/build_system/cmake_files/FindStaticLibs.cmake @@ -47,34 +47,3 @@ macro (find_static_libs libdirsvar libsvar outvar) endforeach(lib ${libs}) endmacro (find_static_libs libdirs libs outvar) - - - - - - - - - - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . diff --git a/src/rezplugins/build_system/cmake_files/InstallDirs.cmake b/src/rezplugins/build_system/cmake_files/InstallDirs.cmake index 71914c0b6..21475ee9f 100644 --- a/src/rezplugins/build_system/cmake_files/InstallDirs.cmake +++ b/src/rezplugins/build_system/cmake_files/InstallDirs.cmake @@ -69,34 +69,3 @@ macro (install_dirs_) endmacro (install_dirs_) - - - - - - - - - - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . diff --git a/src/rezplugins/build_system/cmake_files/InstallFiles.cmake b/src/rezplugins/build_system/cmake_files/InstallFiles.cmake index 700aea3ea..dfdeec2b6 100644 --- a/src/rezplugins/build_system/cmake_files/InstallFiles.cmake +++ b/src/rezplugins/build_system/cmake_files/InstallFiles.cmake @@ -143,25 +143,3 @@ macro (install_files_) endforeach(f ${INSTF_DEFAULT_ARGS}) endmacro (install_files_) - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. 
-# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . diff --git a/src/rezplugins/build_system/cmake_files/RezBuild.cmake b/src/rezplugins/build_system/cmake_files/RezBuild.cmake index 3aae61a46..0173177b9 100644 --- a/src/rezplugins/build_system/cmake_files/RezBuild.cmake +++ b/src/rezplugins/build_system/cmake_files/RezBuild.cmake @@ -113,26 +113,3 @@ endif(COVERAGE) macro(rez_package_in_use pkg_string result) list_contains(${result} ${pkg_string} ${REZ_BUILD_ALL_PKGS}) endmacro(rez_package_in_use pkg_string result) - - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . 
diff --git a/src/rezplugins/build_system/cmake_files/RezFindPackages.cmake b/src/rezplugins/build_system/cmake_files/RezFindPackages.cmake index 69edc8d67..9a78d0289 100644 --- a/src/rezplugins/build_system/cmake_files/RezFindPackages.cmake +++ b/src/rezplugins/build_system/cmake_files/RezFindPackages.cmake @@ -251,32 +251,3 @@ macro (rez_find_packages) endif(DFP_AUTO) endmacro (rez_find_packages) - - - - - - - - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . diff --git a/src/rezplugins/build_system/cmake_files/RezInstallCMake.cmake b/src/rezplugins/build_system/cmake_files/RezInstallCMake.cmake index 025ac52b7..45d3cccbc 100644 --- a/src/rezplugins/build_system/cmake_files/RezInstallCMake.cmake +++ b/src/rezplugins/build_system/cmake_files/RezInstallCMake.cmake @@ -262,23 +262,3 @@ macro(_rez_install_auto_cmake) endif() endmacro(_rez_install_auto_cmake) - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . diff --git a/src/rezplugins/build_system/cmake_files/RezInstallContext.cmake b/src/rezplugins/build_system/cmake_files/RezInstallContext.cmake index 99e9779d9..077300227 100644 --- a/src/rezplugins/build_system/cmake_files/RezInstallContext.cmake +++ b/src/rezplugins/build_system/cmake_files/RezInstallContext.cmake @@ -109,28 +109,3 @@ macro (rez_install_context) ) endmacro (rez_install_context) - - - - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . 
diff --git a/src/rezplugins/build_system/cmake_files/RezInstallPython.cmake b/src/rezplugins/build_system/cmake_files/RezInstallPython.cmake index 66eb0664e..6f624ab98 100644 --- a/src/rezplugins/build_system/cmake_files/RezInstallPython.cmake +++ b/src/rezplugins/build_system/cmake_files/RezInstallPython.cmake @@ -38,29 +38,3 @@ macro (rez_install_python) install_python(${ARGV} BIN python) endmacro (rez_install_python) - - - - - - - - - - -# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios) -# -# This file is part of Rez. -# -# Rez is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Rez is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Rez. If not, see . diff --git a/src/rezplugins/build_system/cmake_files/RezInstallWrappers.cmake b/src/rezplugins/build_system/cmake_files/RezInstallWrappers.cmake deleted file mode 100644 index 66ddca36d..000000000 --- a/src/rezplugins/build_system/cmake_files/RezInstallWrappers.cmake +++ /dev/null @@ -1,176 +0,0 @@ -# -# rez_install_wrappers -# -# This macro installs a context, and also generates a set of wrapper scripts that will -# source this context. For example, say you create a context which contains houdini, -# and you know that the 'hescape' binary is visible on $PATH in this context. If you -# create a wrapper for 'hescape', then a 'hescape' wrapper script will be generated, -# which sources the context and then invokes 'hescape'. -# -# 'label': just a name for the target which builds the context file. 
-# -# 'context_target': name of the context file to generate. -# -# 'context_name': a name for the context associated with the wrapper. You may have -# multiple wrappers which boot into the same context, and when you start an interactive -# shell within that context with the ---i option, 'context_name' is the label which -# will be written into the prompt. -# -# MODE: resolution mode [default: latest] -# -# PACKAGES: packages in the context. -# -# WRAPPERS: wrapper scripts to generate. If a wrapper name is provided of the form -# "FOO:BAH", then the wrapper can be given a different name to the command it will -# invoke - in this example, the wrapper is called FOO and will invoke BAH. -# -# DESTINATION: relative directory where wrappers and context will be installed to. -# -# EXTRA_COMMANDS: extra bash commands, will be added to the end of the context -# -# Usage: -# rez_install_context(