diff --git a/.github/workflows/tool-tests.yml b/.github/workflows/tool-tests.yml index 739797ab..c0a9f96b 100644 --- a/.github/workflows/tool-tests.yml +++ b/.github/workflows/tool-tests.yml @@ -2,6 +2,9 @@ name: Tool Tests env: GH_TOKEN: ${{ secrets.GH_TOKEN }} + MICROSOFT_EMAIL: gindibay@microsoft.com + USER_NAME: Gurkan Indibay + MAIN_BRANCH: all-citus on: push: @@ -25,10 +28,14 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v2 + - name: Define git credentials + run: git config --global user.email "${MICROSOFT_EMAIL}" && git config --global user.name "${USER_NAME}" - name: Install package dependencies run: sudo apt install libcurl4-openssl-dev libssl-dev - name: Install python requirements - run: python -m pip install -r python/requirements.txt - - name: Execute unit tests - run: python -m pytest -q python/tests/test_update_package_properties.py + run: python -m pip install -r packaging_automation/requirements.txt + - name: Execute unit tests for "Update Package Properties" + run: python -m pytest -q packaging_automation/tests/test_update_package_properties.py + - name: Execute unit tests for "Prepare Release" + run: python -m pytest -q packaging_automation/tests/test_prepare_release.py diff --git a/bash/update_package_properties.sh b/bash/update_package_properties.sh index bff8a51b..2c947d3b 100755 --- a/bash/update_package_properties.sh +++ b/bash/update_package_properties.sh @@ -26,8 +26,15 @@ commit_message="Bump to ${PRJ_NAME} ${PRJ_VER}" git checkout -b "${pr_branch_name}" -python tools/python/update_package_properties.py --gh_token "${GH_TOKEN}" --prj_name "${PRJ_NAME}" --prj_ver "${PRJ_VER}" --tag_name "${TAG_NAME}" --fancy "${FANCY}" \ - --fancy_ver_no "${FANCY_VERSION_NO}" --email "${MICROSOFT_EMAIL}" --name "${NAME}" --date "$(date '+%Y.%m.%d %H:%M:%S %z')" --exec_path "$(pwd)" +python tools/python/update_package_properties.py --gh_token "${GH_TOKEN}" \ + --prj_name "${PRJ_NAME}" --prj_ver "${PRJ_VER}" \ + --tag_name "${TAG_NAME}" \ + --fancy "${FANCY}" \
+ --fancy_ver_no "${FANCY_VERSION_NO}" \ + --email "${MICROSOFT_EMAIL}" \ + --name "${NAME}" \ + --date "$(date '+%Y.%m.%d %H:%M:%S %z')" \ + --exec_path "$(pwd)" git commit -a -m "${commit_message}" @@ -35,4 +42,5 @@ echo "{\"title\":\"${commit_message}\", \"head\":\"${pr_branch_name}\", \"base\" git push origin "${pr_branch_name}" -curl -g -H "Accept: application/vnd.github.v3.full+json" -X POST --user "${GH_TOKEN}:x-oauth-basic" -d "{\"title\":\"${commit_message}\", \"head\":\"${pr_branch_name}\", \"base\":\"${main_branch_name}\"}" https://api.github.com/repos/citusdata/packaging/pulls +curl -g -H "Accept: application/vnd.github.v3.full+json" -X POST --user "${GH_TOKEN}:x-oauth-basic" -d \ + "{\"title\":\"${commit_message}\", \"head\":\"${pr_branch_name}\", \"base\":\"${main_branch_name}\"}" https://api.github.com/repos/citusdata/packaging/pulls diff --git a/packaging_automation/README.md b/packaging_automation/README.md new file mode 100644 index 00000000..5cd3c7cd --- /dev/null +++ b/packaging_automation/README.md @@ -0,0 +1,79 @@ +# **Prepare Release Usage** + +prepare-release.py script performs the pre-packaging configurations in citus/citus-enterprise projects. + +## Installation + +Before using script, you need to make sure that Python > 3.8 is installed in your system. 
+ +### Clone Tools Repository + +git clone https://github.com/citusdata/tools.git + +Enter 'tools' directory + +``` console +cd tools +``` + +### Install Required Python Libraries + +Verify pip installation + +``` console +python -m pip --version +``` +Output should be like the following + +``` console +pip 21.1.2 from /home/vagrant/.local/lib/python3.8/site-packages/pip (python 3.8) +``` + +If you get an error, you should first install pip +``` console +sudo apt install python3-pip +``` +Install the required libraries to execute the script +``` console +python -m pip install -r packaging_automation/requirements.txt +``` +If all the steps above completed successfully, you are ready for script execution + +## Script Usage + +Script can be used for either major release (i.e. third digit of release is '0' e.g. 10.1.0) or +patch release (i.e. third digit of release is other than '0' e.g. 10.0.4). + +### Available flags + +**--gh_token:** Personal access token that is authorized to commit citus/citus-enterprise projects. (Required) + +**--prj_name:** Project to be released. Allowed values 'citus' and 'citus-enterprise'. (Required) + +**--prj_ver:** Upcoming version to be used for release. Should include three level of digits separated by dots, e.g: 10.0.1 +(Required) + +**--main_branch:** Branch to be used as base to be used for configuration changes. There is no need for this flag in the base scenario. +This flag can be used for testing purposes. If not used, default branch value is used; i.e. for 'citus' 'master', for 'citus-enterprise' 'enterprise-master' + +**--is_test:** If used, branches would not be pushed to the remote repository and created release branches would be prefixed with 'test'. Default value is False + +**--cherry_pick_enabled:** Available only for patch release. If used, --earliest_pr_date flag also should be used. Gets all PR's with 'backport' label created after earliest_pr_date + +**--earliest_pr_date:** Used with --cherry-pick-enabled flag. Date format is 'Y.m.d' e.g 2012.01.21. 
PR's merged after this date would be listed and cherry-picked. + +**--schema_version:** Available only for patch release. If used, schema version in citus.control file would be updated. + +### Example Usage + +#### Major +``` console +python -m packaging_automation.prepare_release --gh_token <github_token> --prj_name citus --prj_ver 10.1.0 +``` +#### Patch +``` console +python -m packaging_automation.prepare_release --gh_token <github_token> --prj_name citus-enterprise --prj_ver 10.0.4 --schema_version 10.0-5 +``` + + + diff --git a/python/__init__.py b/packaging_automation/__init__.py similarity index 100% rename from python/__init__.py rename to packaging_automation/__init__.py diff --git a/packaging_automation/common_tool_methods.py b/packaging_automation/common_tool_methods.py new file mode 100644 index 00000000..3a7c15c2 --- /dev/null +++ b/packaging_automation/common_tool_methods.py @@ -0,0 +1,292 @@ +import os +import re +import subprocess +from datetime import datetime +from typing import Dict, List +from typing import Tuple + +import pathlib2 +from git import Repo +from github import Repository, PullRequest, Commit +from jinja2 import Environment, FileSystemLoader + +from .common_validations import (is_tag, is_version) + +BASE_GIT_PATH = pathlib2.Path(__file__).parents[1] +PATCH_VERSION_MATCH_FROM_MINOR_SUFFIX = "\.\d{1,3}" + +# When using GitPython library Repo objects should be closed to be able to delete cloned sources +# referenced by Repo objects. References are stored in below array to be able to close +# all resources after the code execution. 
+referenced_repos:List[Repo] = [] + + +def get_new_repo(working_dir: str) -> Repo: + repo = Repo(working_dir) + referenced_repos.append(repo) + return repo + + +def release_all_repos(): + for repo in referenced_repos: + repo.close() + + +def get_spec_file_name(project_name: str) -> str: + return f"{project_name}.spec" + + +def get_project_version_from_tag_name(tag_name: is_tag(str)) -> str: + return tag_name[1:] + + +def get_template_environment(template_dir: str) -> Environment: + file_loader = FileSystemLoader(template_dir) + env = Environment(loader=file_loader) + return env + + +def find_nth_occurrence_position(subject_string: str, search_string: str, n) -> int: + start = subject_string.find(search_string) + + while start >= 0 and n > 1: + start = subject_string.find(search_string, start + 1) + n -= 1 + return start + + +def find_nth_matching_line_and_line_number(subject_string: str, regex_pattern: str, n: int) -> Tuple[int, str]: + """Takes a subject string, regex param and the search index as parameter and returns line number of found match. + If not found returns -1""" + lines = subject_string.splitlines() + counter = 0 + for line_number, line in enumerate(lines): + if re.match(regex_pattern, line): + counter = counter + 1 + if counter == n: + return line_number, lines[line_number] + return -1, "" + + +def remove_text_with_parenthesis(param: str) -> str: + """Removes texts within parenthesis i.e. 
outside parenthesis(inside parenthesis)-> outside parenthesis """ + return re.sub(r"[(\[].*?[)\]]", "", param) + + +def run(command, *args, **kwargs): + result = subprocess.run(command, *args, check=True, shell=True, **kwargs) + return result + + +def cherry_pick_prs(prs: List[PullRequest.PullRequest]): + for pr in prs: + commits = pr.get_commits() + for single_commit in commits: + if not is_merge_commit(single_commit): + cp_result = run(f"git cherry-pick -x {single_commit.commit.sha}") + print( + f"Cherry pick result for PR no {pr.number} and commit sha {single_commit.commit.sha}: {cp_result}") + + +def get_minor_version(version: str) -> str: + project_version_details = get_version_details(version) + return f'{project_version_details["major"]}.{project_version_details["minor"]}' + + +def get_patch_version_regex(version: is_version(str)): + return fr"^{re.escape(get_minor_version(version))}{PATCH_VERSION_MATCH_FROM_MINOR_SUFFIX}$" + + +def is_merge_commit(commit: Commit): + return len(commit.parents) <= 1 + + +def get_version_details(version: is_version(str)) -> Dict[str, str]: + version_parts = version.split(".") + return {"major": version_parts[0], "minor": version_parts[1], "patch": version_parts[2]} + + +def get_upcoming_patch_version(version: is_version(str)) -> str: + project_version_details = get_version_details(version) + return f'{get_upcoming_minor_version(version)}.{int(project_version_details["patch"]) + 1}' + + +def get_upcoming_minor_version(version: is_version(str)) -> str: + project_version_details = get_version_details(version) + return f'{project_version_details["major"]}.{int(project_version_details["minor"]) + 1}' + + +def get_last_commit_message(path: str) -> str: + repo = get_new_repo(path) + commit = repo.head.commit + return commit.message + + +def is_major_release(version: is_version(str)) -> bool: + version_info = get_version_details(version) + return version_info["patch"] == "0" + + +def str_array_to_str(str_array: List[str]) -> str: + 
return f"{os.linesep.join(str_array)}{os.linesep}" + + +def get_prs_for_patch_release(repo: Repository.Repository, earliest_date: datetime, base_branch: str, + last_date: datetime = None): + pull_requests = repo.get_pulls(state="closed", base=base_branch, sort="created", direction="desc") + + # filter pull requests according to given time interval + filtered_pull_requests = list() + for pull_request in pull_requests: + if not pull_request.is_merged(): + continue + if pull_request.merged_at < earliest_date: + continue + if last_date and pull_request.merged_at > last_date: + continue + + filtered_pull_requests.append(pull_request) + + # finally, sort the pr's by their merge date + sorted_pull_requests = sorted(filtered_pull_requests, key=lambda p: p.merged_at) + return sorted_pull_requests + + +def filter_prs_by_label(prs: List[PullRequest.PullRequest], label_name: str): + filtered_prs = [] + for pr in prs: + if any(label.name == label_name for label in pr.labels): + filtered_prs.append(pr) + return filtered_prs + + +def file_includes_line(base_path: str, relative_file_path: str, line_content: str) -> bool: + with open(f"{base_path}/{relative_file_path}", "r") as reader: + content = reader.read() + lines = content.splitlines() + for line in lines: + if line == line_content: + return True + return False + + +def count_line_in_file(base_path: str, relative_file_path: str, search_line: str) -> int: + with open(f"{base_path}/{relative_file_path}", "r") as reader: + content = reader.read() + lines = content.splitlines() + return len(list(filter(lambda line: line == search_line, lines))) + + +def replace_line_in_file(file: str, match_regex: str, replace_str: str) -> bool: + with open(file, "r") as reader: + file_content = reader.read() + lines = file_content.splitlines() + has_match = False + for line_number, line in enumerate(lines): + if re.match(match_regex, line.strip()): + has_match = True + lines[line_number] = replace_str + edited_content = str_array_to_str(lines) + 
with open(file, "w") as writer: + writer.write(edited_content) + + return has_match + + +def append_line_in_file(file: str, match_regex: str, append_str: str) -> bool: + with open(file, "r+") as reader: + file_content = reader.read() + lines = file_content.splitlines() + has_match = False + copy_lines = lines.copy() + appended_line_index = 0 + for line_number, line in enumerate(lines): + if re.match(match_regex, line.strip()): + has_match = True + + if line_number + 1 < len(lines): + copy_lines[appended_line_index + 1] = append_str + # Since line is added after matched string, shift index start with line_number+1 + # increment of appended_line_index is 2 since copy_lines appended_line_index+1 includes + # append_str + lines_to_be_shifted = lines[line_number + 1:] + copy_lines = copy_lines[0:appended_line_index + 2] + lines_to_be_shifted + else: + copy_lines.append(append_str) + appended_line_index = appended_line_index + 1 + edited_content = str_array_to_str(copy_lines) + with open(file, "w") as writer: + writer.write(edited_content) + + return has_match + + +def prepend_line_in_file(file: str, match_regex: str, append_str: str) -> bool: + with open(file, "r+") as reader: + file_content = reader.read() + lines = file_content.splitlines() + has_match = False + copy_lines = lines.copy() + prepended_line_index = 0 + for line_number, line in enumerate(lines): + if re.match(match_regex, line.strip()): + has_match = True + copy_lines[prepended_line_index] = append_str + # Since line is added before matched string shift index start with line_number + # increment of prepend_line_index is 1 line after prepended_line_index should be shifted + lines_to_be_shifted = lines[line_number:] + copy_lines = copy_lines[0:prepended_line_index + 1] + lines_to_be_shifted + prepended_line_index = prepended_line_index + 1 + edited_content = str_array_to_str(copy_lines) + with open(file, "w") as writer: + writer.write(edited_content) + + return has_match + + +def 
get_current_branch(working_dir: str) -> str: + repo = get_new_repo(working_dir) + return repo.active_branch + + +def remote_branch_exists(branch_name: str, working_dir: str) -> bool: + repo = get_new_repo(working_dir) + for rp in repo.references: + if rp.name.endswith(f"/{branch_name}"): + return True + return False + + +def local_branch_exists(branch_name: str, working_dir: str) -> bool: + repo = get_new_repo(working_dir) + for rp in repo.branches: + if rp.name == branch_name: + return True + return False + + +def branch_exists(branch_name: str, working_dir: str) -> bool: + return local_branch_exists(branch_name, working_dir) or remote_branch_exists(branch_name, working_dir) + + +def get_template_environment(template_dir: str) -> Environment: + file_loader = FileSystemLoader(template_dir) + env = Environment(loader=file_loader) + return env + + +def remove_cloned_code(exec_path: str): + release_all_repos() + if os.path.exists(f"{exec_path}"): + print(f"Deleting cloned code {exec_path} ...") + # https://stackoverflow.com/questions/51819472/git-cant-delete-local-branch-operation-not-permitted + # https://askubuntu.com/questions/1049142/cannot-delete-git-directory + # since git directory is readonly first we need to give write permission to delete git directory + if os.path.exists(f"{exec_path}/.git"): + run(f"chmod -R 777 {exec_path}/.git") + try: + run(f"rm -rf {exec_path}") + print("Done. Code deleted successfully.") + except: + print(f"Some files could not be deleted in directory {exec_path}. 
" + f"Please delete them manually or they will be deleted before next execution") diff --git a/packaging_automation/common_validations.py b/packaging_automation/common_validations.py new file mode 100644 index 00000000..4477ed31 --- /dev/null +++ b/packaging_automation/common_validations.py @@ -0,0 +1,30 @@ +import string_utils +from parameters_validation import parameter_validation +import re + +CITUS_MINOR_VERSION_PATTERN = r"\d{1,2}\.\d{1,2}" +CITUS_PATCH_VERSION_PATTERN = CITUS_MINOR_VERSION_PATTERN + r"\.\d{1,2}" + + +@parameter_validation +def is_version(version: str): + if not version: + raise ValueError("version should be non-empty and should not be None") + if not re.match(CITUS_PATCH_VERSION_PATTERN, version): + raise ValueError( + "version should include three level of digits separated by dots, e.g: 10.0.1") + + +@parameter_validation +def is_tag(tag: str): + if not tag: + raise ValueError("tag should be non-empty and should not be None") + if not re.match(f"v{CITUS_PATCH_VERSION_PATTERN}", tag): + raise ValueError( + "tag should start with 'v' and should include three level of digits separated by dots, e.g: v10.0.1") + + +@parameter_validation +def is_email(email: str): + if not string_utils.is_email(email): + raise ValueError("Parameter is not in email format") diff --git a/packaging_automation/prepare_release.py b/packaging_automation/prepare_release.py new file mode 100644 index 00000000..d3d8a19b --- /dev/null +++ b/packaging_automation/prepare_release.py @@ -0,0 +1,580 @@ +import os +import uuid +from dataclasses import dataclass +from datetime import datetime +import argparse +from enum import Enum + +import pathlib2 +from github import Github, Repository +from parameters_validation import (non_blank, non_empty) + +from .common_tool_methods import (get_version_details, get_upcoming_patch_version, is_major_release, + get_prs_for_patch_release, + filter_prs_by_label, cherry_pick_prs, run, replace_line_in_file, get_current_branch, + 
find_nth_matching_line_and_line_number, get_minor_version, get_patch_version_regex, + remote_branch_exists, local_branch_exists, prepend_line_in_file, + get_template_environment, get_upcoming_minor_version, remove_cloned_code) +from .common_validations import (CITUS_MINOR_VERSION_PATTERN, CITUS_PATCH_VERSION_PATTERN, is_version) + +MULTI_EXTENSION_SQL = "src/test/regress/sql/multi_extension.sql" +CITUS_CONTROL = "src/backend/distributed/citus.control" +MULTI_EXTENSION_OUT = "src/test/regress/expected/multi_extension.out" +CONFIG_PY = "src/test/regress/upgrade/config.py" +DISTRIBUTED_SQL_DIR_PATH = "src/backend/distributed/sql" +DOWNGRADES_DIR_PATH = f"{DISTRIBUTED_SQL_DIR_PATH}/downgrades" +CONFIGURE_IN = "configure.in" +CONFIGURE = "configure" +CITUS_CONTROL_SEARCH_PATTERN = r"^default_version*" + +MULTI_EXT_DEVEL_SEARCH_PATTERN = rf"^\s*{CITUS_MINOR_VERSION_PATTERN}devel$" +MULTI_EXT_PATCH_SEARCH_PATTERN = rf"^\s*{CITUS_PATCH_VERSION_PATTERN}$" + +MULTI_EXT_DETAIL_PREFIX = rf"DETAIL: Loaded library requires " +MULTI_EXT_DETAIL1_SUFFIX = rf", but 8.0-1 was specified." +MULTI_EXT_DETAIL2_SUFFIX = rf", but the installed extension version is 8.1-1." 
+MULTI_EXT_DETAIL1_PATTERN = rf"^{MULTI_EXT_DETAIL_PREFIX}\d+\.\d+{MULTI_EXT_DETAIL1_SUFFIX}$" + +MULTI_EXT_DETAIL2_PATTERN = ( + rf"^{MULTI_EXT_DETAIL_PREFIX}\d+\.\d+{MULTI_EXT_DETAIL2_SUFFIX}$") + +CONFIG_PY_MASTER_VERSION_SEARCH_PATTERN = r"^MASTER_VERSION = '\d+\.\d+'" + +CONFIGURE_IN_SEARCH_PATTERN = "AC_INIT*" +REPO_OWNER = "citusdata" + +BASE_PATH = pathlib2.Path(__file__).parent.absolute() +TEMPLATES_PATH = f"{BASE_PATH}/templates" + +MULTI_EXT_OUT_TEMPLATE_FILE = "multi_extension_out_prepare_release.tmpl" +MULTI_EXT_SQL_TEMPLATE_FILE = "multi_extension_sql_prepare_release.tmpl" + +repo_details = { + "citus": { + "configure-in-str": "Citus", + "branch": "master"}, + "citus-enterprise": { + "configure-in-str": "Citus Enterprise", + "branch": "enterprise master"}} + + +@dataclass +class UpdateReleaseReturnValue: + release_branch_name: str + upcoming_version_branch: str + upgrade_path_sql_file: str + downgrade_path_sql_file: str + + +@dataclass +class MajorReleaseParams: + configure_in_path: str + devel_version: str + is_test: bool + main_branch: str + multi_extension_out_path: str + project_name: str + project_version: str + release_branch_name: str + + +@dataclass +class UpcomingVersionBranchParams: + citus_control_file_path: str + config_py_path: str + configure_in_path: str + upcoming_devel_version: str + distributed_dir_path: str + downgrades_dir_path: str + is_test: bool + main_branch: str + multi_extension_out_path: str + multi_extension_sql_path: str + project_name: str + project_version: str + repository: Repository + upcoming_minor_version: str + upcoming_version_branch: str + + +@dataclass +class PatchReleaseParams: + cherry_pick_enabled: bool + configure_in_path: str + earliest_pr_date: datetime + is_test: bool + main_branch: str + citus_control_file_path: str + multi_extension_out_path: str + project_name: str + project_version: str + release_branch_name: str + schema_version: str + repository: Repository + + +BASE_GIT_PATH = 
pathlib2.Path(__file__).parents[1] + + +def update_release(github_token: non_blank(non_empty(str)), project_name: non_blank(non_empty(str)), + project_version: is_version(str), main_branch: non_blank(non_empty(str)), + earliest_pr_date: datetime, exec_path: non_blank(non_empty(str)), schema_version: str = "", + is_test: bool = False, cherry_pick_enabled: bool = False) -> UpdateReleaseReturnValue: + multi_extension_sql_path = f"{exec_path}/{MULTI_EXTENSION_SQL}" + citus_control_file_path = f"{exec_path}/{CITUS_CONTROL}" + multi_extension_out_path = f"{exec_path}/{MULTI_EXTENSION_OUT}" + configure_in_path = f"{exec_path}/{CONFIGURE_IN}" + config_py_path = f"{exec_path}/{CONFIG_PY}" + distributed_dir_path = f"{exec_path}/{DISTRIBUTED_SQL_DIR_PATH}" + downgrades_dir_path = f"{exec_path}/{DOWNGRADES_DIR_PATH}" + + project_version_details = get_version_details(project_version) + upcoming_minor_version = get_upcoming_minor_version(project_version) + upcoming_devel_version = f"{upcoming_minor_version}devel" + + release_branch_name = f'release-{project_version_details["major"]}.{project_version_details["minor"]}' + release_branch_name = f"{release_branch_name}-test" if is_test else release_branch_name + upcoming_version_branch = f"master-update-version-{uuid.uuid4()}" + + g = Github(github_token) + repository = g.get_repo(f"{REPO_OWNER}/{project_name}") + upgrade_file = "" + downgrade_file = "" + + # major release + if is_major_release(project_version): + print(f"### {project_version} is a major release. Executing Major release flow... 
###") + major_release_params = MajorReleaseParams(configure_in_path=configure_in_path, + devel_version=upcoming_devel_version, + is_test=is_test, main_branch=main_branch, + multi_extension_out_path=multi_extension_out_path, + project_name=project_name, project_version=project_version, + release_branch_name=release_branch_name) + prepare_release_branch_for_major_release(major_release_params) + branch_params = UpcomingVersionBranchParams(project_version=project_version, + project_name=project_name, + upcoming_version_branch=upcoming_version_branch, + upcoming_devel_version=upcoming_devel_version, is_test=is_test, + main_branch=main_branch, + citus_control_file_path=citus_control_file_path, + config_py_path=config_py_path, + configure_in_path=configure_in_path, + distributed_dir_path=distributed_dir_path, + downgrades_dir_path=downgrades_dir_path, + repository=repository, + upcoming_minor_version=upcoming_minor_version, + multi_extension_out_path=multi_extension_out_path, + multi_extension_sql_path=multi_extension_sql_path) + + upgrade_file, downgrade_file = prepare_upcoming_version_branch(branch_params) + print(f"### Done {project_version} Major release flow executed successfully. 
###") + # patch release + else: + patch_release_params = PatchReleaseParams(cherry_pick_enabled=cherry_pick_enabled, + configure_in_path=configure_in_path, + earliest_pr_date=earliest_pr_date, is_test=is_test, + main_branch=main_branch, + multi_extension_out_path=multi_extension_out_path, + project_name=project_name, project_version=project_version, + schema_version=schema_version, + citus_control_file_path=citus_control_file_path, + release_branch_name=release_branch_name, repository=repository) + prepare_release_branch_for_patch_release(patch_release_params) + return UpdateReleaseReturnValue(release_branch_name, upcoming_version_branch, + f"{DISTRIBUTED_SQL_DIR_PATH}/{upgrade_file}", + f"{DOWNGRADES_DIR_PATH}/{downgrade_file}") + + +def prepare_release_branch_for_patch_release(patchReleaseParams: PatchReleaseParams): + print(f"### {patchReleaseParams.project_version} is a patch release. Executing Patch release flow... ###") + # checkout release branch (release-X.Y) In test case release branch for test may not be exist. 
+ # In this case create one + if patchReleaseParams.is_test: + + non_test_release_branch = patchReleaseParams.release_branch_name.rstrip("-test") + release_branch_exist = remote_branch_exists(non_test_release_branch, os.getcwd()) + test_release_branch_exist = local_branch_exists(patchReleaseParams.release_branch_name, os.getcwd()) + + if release_branch_exist: + run(f"git checkout {non_test_release_branch}") + run(f"git checkout -b {patchReleaseParams.release_branch_name}") + elif test_release_branch_exist: + run(f"git checkout {patchReleaseParams.release_branch_name}") + else: + run(f"git checkout -b {patchReleaseParams.release_branch_name}") + else: + checkout_branch(patchReleaseParams.release_branch_name, patchReleaseParams.is_test) + # change version info in configure.in file + update_version_in_configure_in(patchReleaseParams.project_name, patchReleaseParams.configure_in_path, + patchReleaseParams.project_version) + # execute "auto-conf " + execute_autoconf_f() + # change version info in multi_extension.out + update_version_in_multi_extension_out_for_patch(patchReleaseParams.multi_extension_out_path, + patchReleaseParams.project_version) + # if schema version is not empty update citus.control schema version + if patchReleaseParams.schema_version: + update_schema_version_in_citus_control(citus_control_file_path=patchReleaseParams.citus_control_file_path, + schema_version=patchReleaseParams.schema_version) + if patchReleaseParams.cherry_pick_enabled: + # cherry-pick the pr's with backport labels + cherrypick_prs_with_backport_labels(patchReleaseParams.earliest_pr_date, patchReleaseParams.main_branch, + patchReleaseParams.release_branch_name, patchReleaseParams.repository) + # commit all changes + commit_changes_for_version_bump(patchReleaseParams.project_name, patchReleaseParams.project_version) + # create and push release-$minor_version-push-$curTime branch + release_pr_branch = f"{patchReleaseParams.release_branch_name}_{uuid.uuid4()}" + 
create_and_checkout_branch(release_pr_branch) + if not patchReleaseParams.is_test: + push_branch(release_pr_branch) + + print(f"### Done Patch release flow executed successfully. ###") + + +def prepare_upcoming_version_branch(upcoming_params: UpcomingVersionBranchParams): + print(f"### {upcoming_params.upcoming_version_branch} flow is being executed... ###") + # checkout master + checkout_branch(upcoming_params.main_branch, upcoming_params.is_test) + # create master-update-version-$curtime branch + create_and_checkout_branch(upcoming_params.upcoming_version_branch) + # update version info with upcoming version on configure.in + update_version_in_configure_in(upcoming_params.project_name, upcoming_params.configure_in_path, + upcoming_params.upcoming_devel_version) + # update version info with upcoming version on config.py + update_version_with_upcoming_version_in_config_py(upcoming_params.config_py_path, + upcoming_params.upcoming_minor_version) + # execute autoconf -f + execute_autoconf_f() + # update version info with upcoming version on multi_extension.out + update_version_in_multi_extension_out(upcoming_params.multi_extension_out_path, + upcoming_params.upcoming_devel_version) + # update detail lines with minor version + update_detail_strings_in_multi_extension_out(upcoming_params.multi_extension_out_path, + upcoming_params.upcoming_minor_version) + # get current schema version from citus.control + current_schema_version = get_current_schema_from_citus_control(upcoming_params.citus_control_file_path) + # add downgrade script in multi_extension.sql file + add_downgrade_script_in_multi_extension_file(current_schema_version, + upcoming_params.multi_extension_sql_path, + upcoming_params.upcoming_minor_version, MULTI_EXT_SQL_TEMPLATE_FILE) + # add downgrade script in multi_extension.out file + add_downgrade_script_in_multi_extension_file(current_schema_version, + upcoming_params.multi_extension_out_path, + upcoming_params.upcoming_minor_version, 
MULTI_EXT_OUT_TEMPLATE_FILE) + # create a new sql file for upgrade path: + upgrade_file = create_new_sql_for_upgrade_path(current_schema_version, + upcoming_params.distributed_dir_path, + upcoming_params.upcoming_minor_version) + # create a new sql file for downgrade path: + downgrade_file = create_new_sql_for_downgrade_path(current_schema_version, + upcoming_params.downgrades_dir_path, + upcoming_params.upcoming_minor_version) + + # change version in citus.control file + default_upcoming_schema_version = f"{upcoming_params.upcoming_minor_version}-1" + update_schema_version_in_citus_control(upcoming_params.citus_control_file_path, + default_upcoming_schema_version) + # commit and push changes on master-update-version-$curtime branch + commit_changes_for_version_bump(upcoming_params.project_name, upcoming_params.upcoming_devel_version) + if not upcoming_params.is_test: + push_branch(upcoming_params.upcoming_version_branch) + + # create pull request + create_pull_request_for_upcoming_version_branch(upcoming_params.repository, upcoming_params.main_branch, + upcoming_params.upcoming_version_branch, + upcoming_params.upcoming_devel_version) + print(f"### Done {upcoming_params.upcoming_version_branch} flow executed. ###") + return upgrade_file, downgrade_file + + +def prepare_release_branch_for_major_release(majorReleaseParams: MajorReleaseParams): + print(f"### {majorReleaseParams.release_branch_name} release branch flow is being executed... 
###") + # checkout master + checkout_branch(majorReleaseParams.main_branch, majorReleaseParams.is_test) + # create release branch in release-X.Y format + create_and_checkout_branch(majorReleaseParams.release_branch_name) + # change version info in configure.in file + update_version_in_configure_in(majorReleaseParams.project_name, majorReleaseParams.configure_in_path, + majorReleaseParams.project_version) + # execute "autoconf -f" + execute_autoconf_f() + # change version info in multi_extension.out + update_version_in_multi_extension_out(majorReleaseParams.multi_extension_out_path, + majorReleaseParams.project_version) + # commit all changes + commit_changes_for_version_bump(majorReleaseParams.project_name, majorReleaseParams.project_version) + # push release branch (No PR creation!!!) + if not majorReleaseParams.is_test: + push_branch(majorReleaseParams.release_branch_name) + print(f"### Done {majorReleaseParams.release_branch_name} release branch flow executed .###") + + +def cherrypick_prs_with_backport_labels(earliest_pr_date, main_branch, release_branch_name, repository): + print( + f"### Getting all PR with backport label after {datetime.strftime(earliest_pr_date, '%Y.%m.%d %H:%M')}... ### ") + prs_with_earliest_date = get_prs_for_patch_release(repository, earliest_pr_date, main_branch) + # get commits for selected prs with backport label + prs_with_backport = filter_prs_by_label(prs_with_earliest_date, "backport") + print(f"### Done {len(prs_with_backport)} PRs with backport label found. PR list is as below. ###") + for pr in prs_with_backport: + print(f"\tNo:{pr.number} Title:{pr.title}") + # cherrypick all commits with backport label + print(f"### Cherry-picking PRs to {release_branch_name}... ###") + cherry_pick_prs(prs_with_backport) + print(f"### Done Cherry pick completed for all PRs on branch {release_branch_name}. 
###") + + +def create_pull_request_for_upcoming_version_branch(repository, main_branch, upcoming_version_branch, upcoming_version): + print(f"### Creating pull request for {upcoming_version_branch}... ###") + pr_result = repository.create_pull(title=f"Bump Citus to {upcoming_version}", base=main_branch, + head=upcoming_version_branch, body="") + print(f"### Done Pull request created. PR no:{pr_result.number} PR URL: {pr_result.url}. ### ") + + +def push_branch(upcoming_version_branch): + print(f"Pushing changes for {upcoming_version_branch} into remote origin... ###") + run(f"git push --set-upstream origin {upcoming_version_branch}") + print(f"### Done Changes pushed for {upcoming_version_branch}. ###") + + +def commit_changes_for_version_bump(project_name, project_version): + current_branch = get_current_branch(os.getcwd()) + print(f"### Committing changes for branch {current_branch}... ###") + run("git add .") + run(f' git commit -m "Bump {project_name} version to {project_version} "') + print(f"### Done Changes committed for {current_branch}. ###") + + +def update_schema_version_in_citus_control(citus_control_file_path, schema_version): + print(f"### Updating {citus_control_file_path} file with the version {schema_version}... ###") + if not replace_line_in_file(citus_control_file_path, CITUS_CONTROL_SEARCH_PATTERN, + f"default_version = '{schema_version}'"): + raise ValueError(f"{citus_control_file_path} does not have match for version") + print(f"### Done {citus_control_file_path} file is updated with the schema version {schema_version}. ###") + + +def add_downgrade_script_in_multi_extension_file(current_schema_version, + multi_extension_out_path, + upcoming_minor_version, template_file: str): + print(f"### Adding downgrade scripts from version {current_schema_version} to " + f"{upcoming_minor_version} on {multi_extension_out_path}... 
### ") + env = get_template_environment(TEMPLATES_PATH) + template = env.get_template( + template_file) # multi_extension_out_prepare_release.tmpl multi_extension_sql_prepare_release.tmpl + string_to_prepend = ( + f"{template.render(current_schema_version=current_schema_version, upcoming_minor_version=f'{upcoming_minor_version}-1')}\n") + + if not prepend_line_in_file(multi_extension_out_path, + f"DROP TABLE prev_objects, extension_diff;", + string_to_prepend): + raise ValueError(f"Downgrade scripts could not be added in {multi_extension_out_path} since " + f"'DROP TABLE prev_objects, extension_diff;' script could not be found ") + print(f"### Done Test downgrade scripts successfully added in {multi_extension_out_path}. ###") + + +def get_current_schema_from_citus_control(citus_control_file_path: str) -> str: + print(f"### Reading current schema version from {citus_control_file_path}... ###") + current_schema_version = "" + with open(citus_control_file_path, "r") as cc_reader: + cc_file_content = cc_reader.read() + cc_line_number, cc_line = find_nth_matching_line_and_line_number(cc_file_content, CITUS_CONTROL_SEARCH_PATTERN, + 1) + schema_not_found = False + if len(cc_line) > 0: + line_parts = cc_line.split("=") + if len(line_parts) == 2: + current_schema_version = line_parts[1] + else: + schema_not_found = True + else: + schema_not_found = True + + if schema_not_found: + raise ValueError("Version info could not be found in citus.control file") + + current_schema_version = current_schema_version.strip(" '") + print(f"### Done Schema version is {current_schema_version}. ###") + return current_schema_version + + +def update_version_with_upcoming_version_in_config_py(config_py_path, upcoming_minor_version): + print(f"### Updating {config_py_path} file with the upcoming version {upcoming_minor_version}... 
###") + if not replace_line_in_file(config_py_path, CONFIG_PY_MASTER_VERSION_SEARCH_PATTERN, + f"MASTER_VERSION = '{upcoming_minor_version}'"): + raise ValueError(f"{config_py_path} does not have match for version") + print(f"### Done {config_py_path} file updated with the upcoming version {upcoming_minor_version}. ###") + + +def update_version_in_multi_extension_out(multi_extension_out_path, project_version): + print(f"### Updating {multi_extension_out_path} file with the project version {project_version}... ###") + + if not replace_line_in_file(multi_extension_out_path, MULTI_EXT_DEVEL_SEARCH_PATTERN, + f" {project_version}"): + raise ValueError( + f"{multi_extension_out_path} does not contain the version with pattern {MULTI_EXT_DEVEL_SEARCH_PATTERN}") + print(f"### Done {multi_extension_out_path} file is updated with project version {project_version}. ###") + + +def update_detail_strings_in_multi_extension_out(multi_extension_out_path, minor_version): + print(f"### Updating {multi_extension_out_path} detail lines file with the project version {minor_version}... ###") + + if not replace_line_in_file(multi_extension_out_path, MULTI_EXT_DETAIL1_PATTERN, + f"{MULTI_EXT_DETAIL_PREFIX}{minor_version}{MULTI_EXT_DETAIL1_SUFFIX}"): + raise ValueError( + f"{multi_extension_out_path} does not contain the version with pattern {MULTI_EXT_DETAIL1_PATTERN}") + + if not replace_line_in_file(multi_extension_out_path, MULTI_EXT_DETAIL2_PATTERN, + f"{MULTI_EXT_DETAIL_PREFIX}{minor_version}{MULTI_EXT_DETAIL2_SUFFIX}"): + raise ValueError( + f"{multi_extension_out_path} does not contain the version with pattern {MULTI_EXT_DETAIL2_PATTERN}") + + print(f"### Done {multi_extension_out_path} detail lines updated with project version {minor_version}. ###") + + +def update_version_in_multi_extension_out_for_patch(multi_extension_out_path, project_version): + print(f"### Updating {multi_extension_out_path} file with the project version {project_version}... 
###") + + if not replace_line_in_file(multi_extension_out_path, + get_patch_version_regex(project_version), + f" {project_version}"): + raise ValueError( + f"{multi_extension_out_path} does not contain the version with pattern {get_patch_version_regex(project_version)}") + print(f"### Done {multi_extension_out_path} file is updated with project version {project_version}. ###") + + +def execute_autoconf_f(): + print(f"### Executing autoconf -f command... ###") + run("autoconf -f") + print(f"### Done autoconf -f executed. ###") + + +def update_version_in_configure_in(project_name,configure_in_path, project_version): + print(f"### Updating version on file {configure_in_path}... ###") + if not replace_line_in_file(configure_in_path, CONFIGURE_IN_SEARCH_PATTERN, + f"AC_INIT([{repo_details[project_name]['configure-in-str']}], [{project_version}])"): + raise ValueError(f"{configure_in_path} does not have match for version") + print(f"### Done {configure_in_path} file is updated with project version {project_version}. ###") + + +def create_and_checkout_branch(release_branch_name): + print(f"### Creating release branch with name {release_branch_name} from {get_current_branch(os.getcwd())}... ###") + run(f'git checkout -b {release_branch_name}') + print(f"### Done {release_branch_name} created. ###") + + +def checkout_branch(branch_name, is_test): + print(f"### Checking out {branch_name}... ###") + run(f"git checkout {branch_name}") + if not is_test: + run(f"git pull") + + print(f"### Done {branch_name} checked out and pulled. ###") + + +def upgrade_sql_file_name(current_schema_version, upcoming_minor_version): + return f"citus--{current_schema_version}--{upcoming_minor_version}-1.sql" + + +def create_new_sql_for_upgrade_path(current_schema_version, distributed_dir_path, + upcoming_minor_version): + newly_created_sql_file = upgrade_sql_file_name(current_schema_version, upcoming_minor_version) + print(f"### Creating upgrade file {newly_created_sql_file}... 
###") + with open(f"{distributed_dir_path}/{newly_created_sql_file}", "w") as f_writer: + content = f"/* citus--{current_schema_version}--{upcoming_minor_version}-1 */" + content = content + "\n\n" + content = content + f"-- bump version to {upcoming_minor_version}-1" + "\n\n" + f_writer.write(content) + print(f"### Done {newly_created_sql_file} created. ###") + return newly_created_sql_file + + +def create_new_sql_for_downgrade_path(current_schema_version, distributed_dir_path, + upcoming_minor_version): + newly_created_sql_file = f"citus--{upcoming_minor_version}-1--{current_schema_version}.sql" + print(f"### Creating downgrade file {newly_created_sql_file}... ###") + with open(f"{distributed_dir_path}/{newly_created_sql_file}", "w") as f_writer: + content = f"/* citus--{upcoming_minor_version}-1--{current_schema_version} */" + content = content + "\n" + content = ( + content + f"-- this is an empty downgrade path since " + f"{upgrade_sql_file_name(current_schema_version, upcoming_minor_version)} " + f"is empty for now" + "\n") + f_writer.write(content) + print(f"### Done {newly_created_sql_file} created. ###") + return newly_created_sql_file + + +CHECKOUT_DIR = "citus_temp" + + +def remove_cloned_code(exec_path: str): + if os.path.exists(f"{exec_path}"): + print(f"Deleting cloned code {exec_path} ...") + # https://stackoverflow.com/questions/51819472/git-cant-delete-local-branch-operation-not-permitted + # https://askubuntu.com/questions/1049142/cannot-delete-git-directory + # since git directory is readonly first we need to give write permission to delete git directory + run(f"chmod -R 777 {exec_path}/.git") + run(f"sudo rm -rf {exec_path}") + print("Done. 
Code deleted successfully.") + + +def initialize_env(exec_path: str, project_name: str): + remove_cloned_code(exec_path) + if not os.path.exists(CHECKOUT_DIR): + run(f"git clone https://github.com/citusdata/{project_name}.git {CHECKOUT_DIR}") + + +def validate_parameters(major_release_flag: bool): + if major_release_flag and arguments.cherry_pick_enabled: + raise ValueError("Cherry pick could be enabled only for patch release") + + if major_release_flag and arguments.earliest_pr_date: + raise ValueError("earliest_pr_date could not be used for major releases") + + if major_release_flag and arguments.schema_version: + raise ValueError("schema_version could not be set for major releases") + + if not major_release_flag and arguments.cherry_pick_enabled \ + and not arguments.earliest_pr_date: + raise ValueError( + "earliest_pr_date parameter could not be empty when cherry pick is enabled and release is major.") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--gh_token', required=True) + parser.add_argument('--prj_name', choices=["citus", "citus-enterprise"], required=True) + parser.add_argument('--prj_ver', required=True) + parser.add_argument('--main_branch') + parser.add_argument('--earliest_pr_date') + parser.add_argument('--cherry_pick_enabled', action="store_true") + parser.add_argument('--is_test', action="store_true") + parser.add_argument('--schema_version', nargs='?') + arguments = parser.parse_args() + is_test = False + execution_path = f"{os.getcwd()}/{CHECKOUT_DIR}" + major_release = is_major_release(arguments.prj_ver) + validate_parameters(major_release) + + try: + initialize_env(execution_path, arguments.prj_name) + + is_cherry_pick_enabled = arguments.cherry_pick_enabled + main_branch = arguments.main_branch if arguments.main_branch else repo_details[arguments.prj_name]["branch"] + print(f"Using main branch {main_branch} for the repo {arguments.prj_name}.") + os.chdir(execution_path) + print(f"Executing in path 
{execution_path}") + is_test = arguments.is_test + earliest_pr_date = None if major_release or not is_cherry_pick_enabled else datetime.strptime( + arguments.earliest_pr_date, + '%Y.%m.%d') + update_release(github_token=arguments.gh_token, project_name=arguments.prj_name, + project_version=arguments.prj_ver, + main_branch=main_branch, + earliest_pr_date=earliest_pr_date, + is_test=arguments.is_test, + cherry_pick_enabled=arguments.cherry_pick_enabled, exec_path=execution_path, + schema_version=arguments.schema_version) + finally: + if not is_test: + remove_cloned_code(execution_path) diff --git a/python/publish-packages-into-microsoft-packages.py b/packaging_automation/publish-packages-into-microsoft-packages.py similarity index 100% rename from python/publish-packages-into-microsoft-packages.py rename to packaging_automation/publish-packages-into-microsoft-packages.py diff --git a/packaging_automation/requirements.in b/packaging_automation/requirements.in new file mode 100644 index 00000000..462cd2ab --- /dev/null +++ b/packaging_automation/requirements.in @@ -0,0 +1,15 @@ +wheel +docker +GitPython +Jinja2 +pathlib2 +pycurl +PyGithub +pytest +python-string-utils +requests +urllib3 +gnupg +parameters_validation +PyYAML + diff --git a/packaging_automation/requirements.txt b/packaging_automation/requirements.txt new file mode 100644 index 00000000..d8596c13 --- /dev/null +++ b/packaging_automation/requirements.txt @@ -0,0 +1,203 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes +# +attrs==19.3.0 \ + --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ + --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 + # via pytest +certifi==2019.11.28 \ + --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ + --hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f + # via requests +chardet==3.0.4 \ + 
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 + # via requests +deprecated==1.2.12 \ + --hash=sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771 \ + --hash=sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1 + # via pygithub +docker==5.0.0 \ + --hash=sha256:3e8bc47534e0ca9331d72c32f2881bb13b93ded0bcdeab3c833fb7cf61c0a9a5 \ + --hash=sha256:fc961d622160e8021c10d1bcabc388c57d55fb1f917175afbe24af442e6879bd + # via -r requirements.in +gitdb==4.0.7 \ + --hash=sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0 \ + --hash=sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005 + # via gitpython +gitpython==3.1.14 \ + --hash=sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b \ + --hash=sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61 + # via -r requirements.in +gnupg==2.3.1 \ + --hash=sha256:8db5a05c369dbc231dab4c98515ce828f2dffdc14f1534441a6c59b71c6d2031 + # via -r requirements.in +idna==2.8 \ + --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ + --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c + # via requests +iniconfig==1.1.1 \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 + # via pytest +jinja2==2.10.1 \ + --hash=sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013 \ + --hash=sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b + # via -r requirements.in +markupsafe==1.1.0 \ + --hash=sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432 \ + --hash=sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b \ + --hash=sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9 \ + 
--hash=sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af \ + --hash=sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834 \ + --hash=sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd \ + --hash=sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d \ + --hash=sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7 \ + --hash=sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b \ + --hash=sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3 \ + --hash=sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c \ + --hash=sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2 \ + --hash=sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7 \ + --hash=sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36 \ + --hash=sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1 \ + --hash=sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e \ + --hash=sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1 \ + --hash=sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c \ + --hash=sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856 \ + --hash=sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550 \ + --hash=sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492 \ + --hash=sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672 \ + --hash=sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401 \ + --hash=sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6 \ + --hash=sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6 \ + --hash=sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c \ + --hash=sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd \ + 
--hash=sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1 + # via jinja2 +packaging==20.9 \ + --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ + --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a + # via pytest +parameters-validation==1.2.0 \ + --hash=sha256:046d512a6a9b1d55cd1f51fec72df87dbb488152a6c4a9776347313e475f31d4 \ + --hash=sha256:1db2aed4681b6c388c7334f8aa0d4f9fb373e8eca43d0f0851fb17a1db9ced1d + # via -r requirements.in +pathlib2==2.3.5 \ + --hash=sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db \ + --hash=sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868 + # via -r requirements.in +pluggy==0.13.1 \ + --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ + --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d + # via pytest +psutil==5.8.0 \ + --hash=sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64 \ + --hash=sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131 \ + --hash=sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c \ + --hash=sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6 \ + --hash=sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023 \ + --hash=sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df \ + --hash=sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394 \ + --hash=sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4 \ + --hash=sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b \ + --hash=sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2 \ + --hash=sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d \ + --hash=sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65 \ + 
--hash=sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d \ + --hash=sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef \ + --hash=sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7 \ + --hash=sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60 \ + --hash=sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6 \ + --hash=sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8 \ + --hash=sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b \ + --hash=sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d \ + --hash=sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac \ + --hash=sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935 \ + --hash=sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d \ + --hash=sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28 \ + --hash=sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876 \ + --hash=sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0 \ + --hash=sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3 \ + --hash=sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563 + # via gnupg +py==1.10.0 \ + --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ + --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a + # via pytest +pycurl==7.43.0.6 \ + --hash=sha256:8301518689daefa53726b59ded6b48f33751c383cf987b0ccfbbc4ed40281325 + # via -r requirements.in +pygithub==1.54.1 \ + --hash=sha256:300bc16e62886ca6537b0830e8f516ea4bc3ef12d308e0c5aff8bdbd099173d4 \ + --hash=sha256:87afd6a67ea582aa7533afdbf41635725f13d12581faed7e3e04b1579c0c0627 + # via -r requirements.in +pyjwt==1.7.1 \ + --hash=sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e \ + 
--hash=sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96 + # via pygithub +pyparsing==2.4.7 \ + --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ + --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b + # via packaging +pytest==6.2.3 \ + --hash=sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634 \ + --hash=sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc + # via -r requirements.in +python-string-utils==1.0.0 \ + --hash=sha256:dcf9060b03f07647c0a603408dc8b03f807f3b54a05c6e19eb14460256fac0cb \ + --hash=sha256:f1a88700baf99db1a9b6953f44181ad9ca56623c81e257e6009707e2e7851fa4 + # via -r requirements.in +pyyaml==5.3.1 \ + --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ + --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ + --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ + --hash=sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e \ + --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ + --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ + --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ + --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ + --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ + --hash=sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a \ + --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ + --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ + --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a + # via -r requirements.in +requests==2.22.0 \ + --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ + 
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 + # via + # -r requirements.in + # docker + # pygithub +six==1.14.0 \ + --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ + --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c + # via pathlib2 +smmap==4.0.0 \ + --hash=sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182 \ + --hash=sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2 + # via gitdb +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via pytest +urllib3==1.25.8 \ + --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ + --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc + # via + # -r requirements.in + # requests +websocket-client==1.0.1 \ + --hash=sha256:3e2bf58191d4619b161389a95bdce84ce9e0b24eb8107e7e590db682c2d0ca81 \ + --hash=sha256:abf306dc6351dcef07f4d40453037e51cc5d9da2ef60d0fc5d0fe3bcda255372 + # via docker +wheel==0.36.2 \ + --hash=sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e \ + --hash=sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e + # via -r requirements.in +wrapt==1.12.1 \ + --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 + # via deprecated diff --git a/packaging_automation/templates/multi_extension_out_prepare_release.tmpl b/packaging_automation/templates/multi_extension_out_prepare_release.tmpl new file mode 100644 index 00000000..4ca90db5 --- /dev/null +++ b/packaging_automation/templates/multi_extension_out_prepare_release.tmpl @@ -0,0 +1,15 @@ +-- Test downgrade to {{current_schema_version}} from {{upcoming_minor_version}} +ALTER EXTENSION citus UPDATE TO '{{upcoming_minor_version}}'; +ALTER EXTENSION citus UPDATE TO '{{current_schema_version}}'; 
+-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at {{upcoming_minor_version}} +ALTER EXTENSION citus UPDATE TO '{{upcoming_minor_version}}'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) diff --git a/packaging_automation/templates/multi_extension_sql_prepare_release.tmpl b/packaging_automation/templates/multi_extension_sql_prepare_release.tmpl new file mode 100644 index 00000000..ac9d3644 --- /dev/null +++ b/packaging_automation/templates/multi_extension_sql_prepare_release.tmpl @@ -0,0 +1,9 @@ +-- Test downgrade to {{current_schema_version}} from {{upcoming_minor_version}} +ALTER EXTENSION citus UPDATE TO '{{upcoming_minor_version}}'; +ALTER EXTENSION citus UPDATE TO '{{current_schema_version}}'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at {{upcoming_minor_version}} +ALTER EXTENSION citus UPDATE TO '{{upcoming_minor_version}}'; +SELECT * FROM print_extension_changes(); diff --git a/python/templates/pkgvars.tmpl b/packaging_automation/templates/pkgvars.tmpl similarity index 100% rename from python/templates/pkgvars.tmpl rename to packaging_automation/templates/pkgvars.tmpl diff --git a/python/templates/project.spec.tmpl b/packaging_automation/templates/project.spec.tmpl similarity index 100% rename from python/templates/project.spec.tmpl rename to packaging_automation/templates/project.spec.tmpl diff --git a/python/tests/__init__.py b/packaging_automation/tests/__init__.py similarity index 100% rename from python/tests/__init__.py rename to packaging_automation/tests/__init__.py diff --git a/python/tests/files/citus.spec 
b/packaging_automation/tests/files/citus.spec similarity index 100% rename from python/tests/files/citus.spec rename to packaging_automation/tests/files/citus.spec diff --git a/python/tests/files/citus_include_10_0_3.spec b/packaging_automation/tests/files/citus_include_10_0_3.spec similarity index 100% rename from python/tests/files/citus_include_10_0_3.spec rename to packaging_automation/tests/files/citus_include_10_0_3.spec diff --git a/python/tests/files/debian.changelog.refer b/packaging_automation/tests/files/debian.changelog.refer similarity index 100% rename from python/tests/files/debian.changelog.refer rename to packaging_automation/tests/files/debian.changelog.refer diff --git a/python/tests/files/debian.changelog_include_10_0_3.refer b/packaging_automation/tests/files/debian.changelog_include_10_0_3.refer similarity index 100% rename from python/tests/files/debian.changelog_include_10_0_3.refer rename to packaging_automation/tests/files/debian.changelog_include_10_0_3.refer diff --git a/python/tests/files/debian/changelog b/packaging_automation/tests/files/debian/changelog similarity index 100% rename from python/tests/files/debian/changelog rename to packaging_automation/tests/files/debian/changelog diff --git a/python/tests/files/pkgvars b/packaging_automation/tests/files/pkgvars similarity index 100% rename from python/tests/files/pkgvars rename to packaging_automation/tests/files/pkgvars diff --git a/python/tests/files/verify/debian_changelog_with_10.0.3.txt b/packaging_automation/tests/files/verify/debian_changelog_with_10.0.3.txt similarity index 100% rename from python/tests/files/verify/debian_changelog_with_10.0.3.txt rename to packaging_automation/tests/files/verify/debian_changelog_with_10.0.3.txt diff --git a/python/tests/files/verify/expected_changelog_10.0.3.txt b/packaging_automation/tests/files/verify/expected_changelog_10.0.3.txt similarity index 100% rename from python/tests/files/verify/expected_changelog_10.0.3.txt rename to 
packaging_automation/tests/files/verify/expected_changelog_10.0.3.txt diff --git a/python/tests/files/verify/expected_debian_latest_v10.0.3.txt b/packaging_automation/tests/files/verify/expected_debian_latest_v10.0.3.txt similarity index 100% rename from python/tests/files/verify/expected_debian_latest_v10.0.3.txt rename to packaging_automation/tests/files/verify/expected_debian_latest_v10.0.3.txt diff --git a/python/tests/files/verify/rpm_latest_changelog_reference.txt b/packaging_automation/tests/files/verify/rpm_latest_changelog_reference.txt similarity index 100% rename from python/tests/files/verify/rpm_latest_changelog_reference.txt rename to packaging_automation/tests/files/verify/rpm_latest_changelog_reference.txt diff --git a/packaging_automation/tests/projects/citus/configure.in b/packaging_automation/tests/projects/citus/configure.in new file mode 100644 index 00000000..4b2e1d6d --- /dev/null +++ b/packaging_automation/tests/projects/citus/configure.in @@ -0,0 +1,312 @@ +# Citus autoconf input script. +# +# Converted into an actual configure script by autogen.sh. This +# conversion only has to be done when configure.in changes. To avoid +# everyone needing autoconf installed, the resulting files are checked +# into the SCM. 
+ +AC_INIT([Citus], [10.1devel]) +AC_COPYRIGHT([Copyright (c) Citus Data, Inc.]) + +# we'll need sed and awk for some of the version commands +AC_PROG_SED +AC_PROG_AWK + +# CITUS_NAME definition +AC_DEFINE_UNQUOTED(CITUS_NAME, "$PACKAGE_NAME", [Citus full name as a string]) + +case $PACKAGE_NAME in + 'Citus Enterprise') citus_edition=enterprise ;; + Citus) citus_edition=community ;; + *) AC_MSG_ERROR([Unrecognized package name.]) ;; +esac + +# CITUS_EDITION definition +AC_DEFINE_UNQUOTED(CITUS_EDITION, "$citus_edition", [Citus edition as a string]) + +# CITUS_MAJORVERSION definition +[CITUS_MAJORVERSION=`expr "$PACKAGE_VERSION" : '\([0-9][0-9]*\.[0-9][0-9]*\)'`] +AC_DEFINE_UNQUOTED(CITUS_MAJORVERSION, "$CITUS_MAJORVERSION", [Citus major version as a string]) + +# CITUS_VERSION definition +PGAC_ARG_REQ(with, extra-version, [STRING], [append STRING to version], + [CITUS_VERSION="$PACKAGE_VERSION$withval"], + [CITUS_VERSION="$PACKAGE_VERSION"]) +AC_DEFINE_UNQUOTED(CITUS_VERSION, "$CITUS_VERSION", [Citus version as a string]) + +# CITUS_VERSION_NUM definition +# awk -F is a regex on some platforms, and not on others, so make "." a tab +[CITUS_VERSION_NUM="`echo "$PACKAGE_VERSION" | sed 's/[A-Za-z].*$//' | +tr '.' ' ' | +$AWK '{printf "%d%02d%02d", $1, $2, (NF >= 3) ? $3 : 0}'`"] +AC_DEFINE_UNQUOTED(CITUS_VERSION_NUM, $CITUS_VERSION_NUM, [Citus version as a number]) + +# CITUS_EXTENSIONVERSION definition +[CITUS_EXTENSIONVERSION="`grep '^default_version' $srcdir/src/backend/distributed/citus.control | cut -d\' -f2`"] +AC_DEFINE_UNQUOTED([CITUS_EXTENSIONVERSION], "$CITUS_EXTENSIONVERSION", [Extension version expected by this Citus build]) + +# Re-check for flex. 
That allows to compile citus against a postgres +# which was built without flex available (possible because generated +# files are included) +AC_PATH_PROG([FLEX], [flex]) + +# Locate pg_config binary +AC_ARG_VAR([PG_CONFIG], [Location to find pg_config for target PostgreSQL instalation (default PATH)]) +AC_ARG_VAR([PATH], [PATH for target PostgreSQL install pg_config]) + +if test -z "$PG_CONFIG"; then + AC_PATH_PROG(PG_CONFIG, pg_config) +fi + +if test -z "$PG_CONFIG"; then + AC_MSG_ERROR([Could not find pg_config. Set PG_CONFIG or PATH.]) +fi + +# check we're building against a supported version of PostgreSQL +citusac_pg_config_version=$($PG_CONFIG --version 2>/dev/null) +version_num=$(echo "$citusac_pg_config_version"| + $SED -e 's/^PostgreSQL \([[0-9]]*\)\(\.[[0-9]]*\)\{0,1\}\(.*\)$/\1\2/') + +# if PostgreSQL version starts with two digits, the major version is those digits +version_num=$(echo "$version_num"| $SED -e 's/^\([[0-9]]\{2\}\)\(.*\)$/\1/') + +if test -z "$version_num"; then + AC_MSG_ERROR([Could not detect PostgreSQL version from pg_config.]) +fi + +if test "$version_num" != '12' -a "$version_num" != '13'; then + AC_MSG_ERROR([Citus is not compatible with the detected PostgreSQL version ${version_num}.]) +else + AC_MSG_NOTICE([building against PostgreSQL $version_num]) +fi; + +# Check whether we're building inside the source tree, if not, prepare +# the build directory. +if test "$srcdir" -ef '.' ; then + vpath_build=no +else + vpath_build=yes + _AS_ECHO_N([preparing build tree... ]) + citusac_abs_top_srcdir=`cd "$srcdir" && pwd` + $SHELL "$citusac_abs_top_srcdir/prep_buildtree" "$citusac_abs_top_srcdir" "." \ + || AC_MSG_ERROR(failed) + AC_MSG_RESULT(done) +fi +AC_SUBST(vpath_build) + +# Allow to overwrite the C compiler, default to the one postgres was +# compiled with. We don't want autoconf's default CFLAGS though, so save +# those. 
+SAVE_CFLAGS="$CFLAGS" +AC_PROG_CC([$($PG_CONFIG --cc)]) +CFLAGS="$SAVE_CFLAGS" + +host_guess=`${SHELL} $srcdir/config/config.guess` + +# Create compiler version string +if test x"$GCC" = x"yes" ; then + cc_string=`${CC} --version | sed q` + case $cc_string in [[A-Za-z]]*) ;; *) cc_string="GCC $cc_string";; esac +elif test x"$SUN_STUDIO_CC" = x"yes" ; then + cc_string=`${CC} -V 2>&1 | sed q` +else + cc_string=$CC +fi + +AC_CHECK_SIZEOF([void *]) + +AC_DEFINE_UNQUOTED(CITUS_VERSION_STR, + ["$PACKAGE_NAME $CITUS_VERSION on $host_guess, compiled by $cc_string, `expr $ac_cv_sizeof_void_p \* 8`-bit"], + [A string containing the version number, platform, and C compiler]) + +# Locate source and build directory of the postgres we're building +# against. Can't rely on either still being present, but e.g. optional +# test infrastructure can rely on it. +POSTGRES_SRCDIR=$(grep ^abs_top_srcdir $(dirname $($PG_CONFIG --pgxs))/../Makefile.global|cut -d ' ' -f3-) +POSTGRES_BUILDDIR=$(grep ^abs_top_builddir $(dirname $($PG_CONFIG --pgxs))/../Makefile.global|cut -d ' ' -f3-) + + +# check for a number of CFLAGS that make development easier + +# CITUSAC_PROG_CC_CFLAGS_OPT +# ----------------------- +# Given a string, check if the compiler supports the string as a +# command-line option. If it does, add the string to CFLAGS. 
+AC_DEFUN([CITUSAC_PROG_CC_CFLAGS_OPT], +[define([Ac_cachevar], [AS_TR_SH([citusac_cv_prog_cc_cflags_$1])])dnl +AC_CACHE_CHECK([whether $CC supports $1], [Ac_cachevar], +[citusac_save_CFLAGS=$CFLAGS +flag=$1 +case $flag in -Wno*) + flag=-W$(echo $flag | cut -c 6-) +esac +CFLAGS="$citusac_save_CFLAGS $flag" +ac_save_c_werror_flag=$ac_c_werror_flag +ac_c_werror_flag=yes +_AC_COMPILE_IFELSE([AC_LANG_PROGRAM()], + [Ac_cachevar=yes], + [Ac_cachevar=no]) +ac_c_werror_flag=$ac_save_c_werror_flag +CFLAGS="$citusac_save_CFLAGS"]) +if test x"$Ac_cachevar" = x"yes"; then + CITUS_CFLAGS="$CITUS_CFLAGS $1" +fi +undefine([Ac_cachevar])dnl +])# CITUSAC_PROG_CC_CFLAGS_OPT + +CITUSAC_PROG_CC_CFLAGS_OPT([-std=gnu99]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wall]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wextra]) +# disarm options included in the above, which are too noisy for now +CITUSAC_PROG_CC_CFLAGS_OPT([-Wno-unused-parameter]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wno-sign-compare]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wno-missing-field-initializers]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wno-clobbered]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wno-gnu-variable-sized-type-not-at-end]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wno-declaration-after-statement]) +# And add a few extra warnings +CITUSAC_PROG_CC_CFLAGS_OPT([-Wendif-labels]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wmissing-format-attribute]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wmissing-declarations]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wmissing-prototypes]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Wshadow]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Werror=vla]) # visual studio does not support these +CITUSAC_PROG_CC_CFLAGS_OPT([-Werror=implicit-int]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Werror=implicit-function-declaration]) +CITUSAC_PROG_CC_CFLAGS_OPT([-Werror=return-type]) +# Security flags +# Flags taken from: https://liquid.microsoft.com/Web/Object/Read/ms.security/Requirements/Microsoft.Security.SystemsADM.10203#guide +# We do not enforce the following flag because it is only available on GCC>=8 
+CITUSAC_PROG_CC_CFLAGS_OPT([-fstack-clash-protection]) + +# +# --enable-coverage enables generation of code coverage metrics with gcov +# +AC_ARG_ENABLE([coverage], AS_HELP_STRING([--enable-coverage], [build with coverage testing instrumentation])) +if test "$enable_coverage" = yes; then + CITUS_CFLAGS="$CITUS_CFLAGS -O0 -g --coverage" + CITUS_CPPFLAGS="$CITUS_CPPFLAGS -DNDEBUG" + CITUS_LDFLAGS="$CITUS_LDFLAGS --coverage" +fi + +# +# libcurl +# +PGAC_ARG_BOOL(with, libcurl, yes, + [do not use libcurl for anonymous statistics collection], + [AC_DEFINE([HAVE_LIBCURL], 1, [Define to 1 to build with libcurl support. (--with-libcurl)])]) + +if test "$with_libcurl" = yes; then + AC_CHECK_LIB(curl, curl_global_init, [], + [AC_MSG_ERROR([libcurl not found +If you have libcurl already installed, see config.log for details on the +failure. It is possible the compiler isn't looking in the proper directory. +Use --without-libcurl to disable anonymous statistics collection.])]) + AC_CHECK_HEADER(curl/curl.h, [], [AC_MSG_ERROR([libcurl header not found +If you have libcurl already installed, see config.log for details on the +failure. It is possible the compiler isn't looking in the proper directory. 
+Use --without-libcurl to disable libcurl support.])]) +fi + +# REPORTS_BASE_URL definition +PGAC_ARG_REQ(with, reports-hostname, [HOSTNAME], + [Use HOSTNAME as hostname for statistics collection and update checks], + [REPORTS_BASE_URL="https://${withval}"], + [REPORTS_BASE_URL="https://reports.citusdata.com"]) +AC_DEFINE_UNQUOTED(REPORTS_BASE_URL, "$REPORTS_BASE_URL", + [Base URL for statistics collection and update checks]) + +HAS_TABLEAM=yes +AC_DEFINE([HAS_TABLEAM], 1, [Define to 1 to build with table access method support, pg12 and up]) + +# Require lz4 & zstd only if we are compiling columnar +if test "$HAS_TABLEAM" == 'yes'; then + # + # LZ4 + # + PGAC_ARG_BOOL(with, lz4, yes, + [do not use lz4]) + AC_SUBST(with_lz4) + + if test "$with_lz4" = yes; then + AC_CHECK_LIB(lz4, LZ4_compress_default, [], + [AC_MSG_ERROR([lz4 library not found + If you have lz4 installed, see config.log for details on the + failure. It is possible the compiler isn't looking in the proper directory. + Use --without-lz4 to disable lz4 support.])]) + AC_CHECK_HEADER(lz4.h, [], [AC_MSG_ERROR([lz4 header not found + If you have lz4 already installed, see config.log for details on the + failure. It is possible the compiler isn't looking in the proper directory. + Use --without-lz4 to disable lz4 support.])]) + fi + + # + # ZSTD + # + PGAC_ARG_BOOL(with, zstd, yes, + [do not use zstd]) + AC_SUBST(with_zstd) + + if test "$with_zstd" = yes; then + AC_CHECK_LIB(zstd, ZSTD_decompress, [], + [AC_MSG_ERROR([zstd library not found + If you have zstd installed, see config.log for details on the + failure. It is possible the compiler isn't looking in the proper directory. + Use --without-zstd to disable zstd support.])]) + AC_CHECK_HEADER(zstd.h, [], [AC_MSG_ERROR([zstd header not found + If you have zstd already installed, see config.log for details on the + failure. It is possible the compiler isn't looking in the proper directory. 
+  Use --without-zstd to disable zstd support.])])
+  fi
+
+fi # test "$HAS_TABLEAM" == 'yes'
+
+
+PGAC_ARG_BOOL(with, security-flags, no,
+  [use security flags])
+AC_SUBST(with_security_flags)
+
+if test "$with_security_flags" = yes; then
+# Flags taken from: https://liquid.microsoft.com/Web/Object/Read/ms.security/Requirements/Microsoft.Security.SystemsADM.10203#guide
+
+# We always want to have some compiler flags for security concerns.
+SECURITY_CFLAGS="-fstack-protector-strong -D_FORTIFY_SOURCE=2 -O2 -z noexecstack -fpic -shared -Wl,-z,relro -Wl,-z,now -Wformat -Wformat-security -Werror=format-security"
+CITUS_CFLAGS="$CITUS_CFLAGS $SECURITY_CFLAGS"
+AC_MSG_NOTICE([Blindly added security flags for linker: $SECURITY_CFLAGS])
+
+# We always want to have some clang flags for security concerns.
+# This doesn't include "-Wl,-z,relro -Wl,-z,now" on purpose, because bitcode is not linked.
+# This doesn't include -fsanitize=cfi because it breaks builds on many distros including
+# Debian/Buster, Debian/Stretch, Ubuntu/Bionic, Ubuntu/Xenial and EL7. 
+SECURITY_BITCODE_CFLAGS="-fsanitize=safe-stack -fstack-protector-strong -flto -fPIC -Wformat -Wformat-security -Werror=format-security" +CITUS_BITCODE_CFLAGS="$CITUS_BITCODE_CFLAGS $SECURITY_BITCODE_CFLAGS" +AC_MSG_NOTICE([Blindly added security flags for llvm: $SECURITY_BITCODE_CFLAGS]) + +AC_MSG_WARN([If you run into issues during linking or bitcode compilation, you can use --without-security-flags.]) +fi + +# Check if git is installed, when installed the gitref of the checkout will be baked in the application +AC_PATH_PROG(GIT_BIN, git) +AC_CHECK_FILE(.git,[HAS_DOTGIT=yes], [HAS_DOTGIT=]) + +AC_SUBST(CITUS_CFLAGS, "$CITUS_CFLAGS") +AC_SUBST(CITUS_BITCODE_CFLAGS, "$CITUS_BITCODE_CFLAGS") +AC_SUBST(CITUS_CPPFLAGS, "$CITUS_CPPFLAGS") +AC_SUBST(CITUS_LDFLAGS, "$LIBS $CITUS_LDFLAGS") +AC_SUBST(POSTGRES_SRCDIR, "$POSTGRES_SRCDIR") +AC_SUBST(POSTGRES_BUILDDIR, "$POSTGRES_BUILDDIR") +AC_SUBST(HAS_DOTGIT, "$HAS_DOTGIT") +AC_SUBST(HAS_TABLEAM, "$HAS_TABLEAM") + +AC_CONFIG_FILES([Makefile.global]) +AC_CONFIG_HEADERS([src/include/citus_config.h] [src/include/citus_version.h]) +AH_TOP([ +/* + * citus_config.h.in is generated by autoconf/autoheader and + * converted into citus_config.h by configure. Include when code needs to + * depend on determinations made by configure. + * + * Do not manually edit! 
+ */ +]) +AC_OUTPUT diff --git a/packaging_automation/tests/projects/citus/src/backend/distributed/citus.control b/packaging_automation/tests/projects/citus/src/backend/distributed/citus.control new file mode 100644 index 00000000..a004b145 --- /dev/null +++ b/packaging_automation/tests/projects/citus/src/backend/distributed/citus.control @@ -0,0 +1,6 @@ +# Citus extension +comment = 'Citus distributed database' +default_version = '10.1-1' +module_pathname = '$libdir/citus' +relocatable = false +schema = pg_catalog diff --git a/packaging_automation/tests/projects/citus/src/test/regress/expected/multi_extension.out b/packaging_automation/tests/projects/citus/src/test/regress/expected/multi_extension.out new file mode 100644 index 00000000..72256f54 --- /dev/null +++ b/packaging_automation/tests/projects/citus/src/test/regress/expected/multi_extension.out @@ -0,0 +1,911 @@ +-- +-- MULTI_EXTENSION +-- +-- Tests around extension creation / upgrades +-- +-- It'd be nice to script generation of this file, but alas, that's +-- not done yet. +-- differentiate the output file for pg11 and versions above, with regards to objects +-- created per citus version depending on the postgres version. Upgrade tests verify the +-- objects are added in citus_finish_pg_upgrade() +SHOW server_version \gset +SELECT substring(:'server_version', '\d+')::int > 11 AS version_above_eleven; + version_above_eleven +--------------------------------------------------------------------- + t +(1 row) + +SET citus.next_shard_id TO 580000; +SELECT $definition$ +CREATE OR REPLACE FUNCTION test.maintenance_worker() + RETURNS pg_stat_activity + LANGUAGE plpgsql +AS $$ +DECLARE + activity record; +BEGIN + DO 'BEGIN END'; -- Force maintenance daemon to start + -- we don't want to wait forever; loop will exit after 20 seconds + FOR i IN 1 .. 
200 LOOP + PERFORM pg_stat_clear_snapshot(); + SELECT * INTO activity FROM pg_stat_activity + WHERE application_name = 'Citus Maintenance Daemon' AND datname = current_database(); + IF activity.pid IS NOT NULL THEN + RETURN activity; + ELSE + PERFORM pg_sleep(0.1); + END IF ; + END LOOP; + -- fail if we reach the end of this loop + raise 'Waited too long for maintenance daemon to start'; +END; +$$; +$definition$ create_function_test_maintenance_worker +\gset +CREATE TABLE prev_objects(description text); +CREATE TABLE extension_diff(previous_object text COLLATE "C", + current_object text COLLATE "C"); +CREATE FUNCTION print_extension_changes() +RETURNS TABLE(previous_object text, current_object text) +AS $func$ +BEGIN + TRUNCATE TABLE extension_diff; + + CREATE TABLE current_objects AS + SELECT pg_catalog.pg_describe_object(classid, objid, 0) AS description + FROM pg_catalog.pg_depend, pg_catalog.pg_extension e + WHERE refclassid = 'pg_catalog.pg_extension'::pg_catalog.regclass + AND refobjid = e.oid + AND deptype = 'e' + AND e.extname='citus'; + + INSERT INTO extension_diff + SELECT p.description previous_object, c.description current_object + FROM current_objects c FULL JOIN prev_objects p + ON p.description = c.description + WHERE p.description is null OR c.description is null; + + DROP TABLE prev_objects; + ALTER TABLE current_objects RENAME TO prev_objects; + + RETURN QUERY SELECT * FROM extension_diff ORDER BY 1, 2; +END +$func$ LANGUAGE plpgsql; +CREATE SCHEMA test; +:create_function_test_maintenance_worker +-- check maintenance daemon is started +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + datname | current_database | usename | extowner +--------------------------------------------------------------------- + regression | regression | postgres | postgres +(1 row) + +-- ensure no unexpected objects were created outside pg_catalog +SELECT pgio.type, 
pgio.identity +FROM pg_depend AS pgd, + pg_extension AS pge, + LATERAL pg_identify_object(pgd.classid, pgd.objid, pgd.objsubid) AS pgio +WHERE pgd.refclassid = 'pg_extension'::regclass AND + pgd.refobjid = pge.oid AND + pge.extname = 'citus' AND + pgio.schema NOT IN ('pg_catalog', 'citus', 'citus_internal', 'test', 'columnar') +ORDER BY 1, 2; + type | identity +--------------------------------------------------------------------- + view | public.citus_tables +(1 row) + +-- DROP EXTENSION pre-created by the regression suite +DROP EXTENSION citus; +\c +-- these tests switch between citus versions and call ddl's that require pg_dist_object to be created +SET citus.enable_object_propagation TO 'false'; +SET citus.enable_version_checks TO 'false'; +CREATE EXTENSION citus VERSION '8.0-1'; +ALTER EXTENSION citus UPDATE TO '8.0-2'; +ALTER EXTENSION citus UPDATE TO '8.0-3'; +ALTER EXTENSION citus UPDATE TO '8.0-4'; +ALTER EXTENSION citus UPDATE TO '8.0-5'; +ALTER EXTENSION citus UPDATE TO '8.0-6'; +ALTER EXTENSION citus UPDATE TO '8.0-7'; +ALTER EXTENSION citus UPDATE TO '8.0-8'; +ALTER EXTENSION citus UPDATE TO '8.0-9'; +ALTER EXTENSION citus UPDATE TO '8.0-10'; +ALTER EXTENSION citus UPDATE TO '8.0-11'; +ALTER EXTENSION citus UPDATE TO '8.0-12'; +ALTER EXTENSION citus UPDATE TO '8.0-13'; +ALTER EXTENSION citus UPDATE TO '8.1-1'; +ALTER EXTENSION citus UPDATE TO '8.2-1'; +ALTER EXTENSION citus UPDATE TO '8.2-2'; +ALTER EXTENSION citus UPDATE TO '8.2-3'; +ALTER EXTENSION citus UPDATE TO '8.2-4'; +ALTER EXTENSION citus UPDATE TO '8.3-1'; +ALTER EXTENSION citus UPDATE TO '9.0-1'; +ALTER EXTENSION citus UPDATE TO '9.0-2'; +ALTER EXTENSION citus UPDATE TO '9.1-1'; +ALTER EXTENSION citus UPDATE TO '9.2-1'; +ALTER EXTENSION citus UPDATE TO '9.2-2'; +-- Snapshot of state at 9.2-2 +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + | event trigger citus_cascade_to_partition + | 
function alter_role_if_exists(text,text) + | function any_value(anyelement) + | function any_value_agg(anyelement,anyelement) + | function array_cat_agg(anyarray) + | function assign_distributed_transaction_id(integer,bigint,timestamp with time zone) + | function authinfo_valid(text) + | function broadcast_intermediate_result(text,text) + | function check_distributed_deadlocks() + | function citus_add_rebalance_strategy(name,regproc,regproc,regproc,real,real) + | function citus_blocking_pids(integer) + | function citus_create_restore_point(text) + | function citus_dist_stat_activity() + | function citus_drop_trigger() + | function citus_executor_name(integer) + | function citus_extradata_container(internal) + | function citus_finish_pg_upgrade() + | function citus_internal.find_groupid_for_node(text,integer) + | function citus_internal.pg_dist_node_trigger_func() + | function citus_internal.pg_dist_rebalance_strategy_enterprise_check() + | function citus_internal.pg_dist_rebalance_strategy_trigger_func() + | function citus_internal.pg_dist_shard_placement_trigger_func() + | function citus_internal.refresh_isolation_tester_prepared_statement() + | function citus_internal.replace_isolation_tester_func() + | function citus_internal.restore_isolation_tester_func() + | function citus_isolation_test_session_is_blocked(integer,integer[]) + | function citus_json_concatenate(json,json) + | function citus_json_concatenate_final(json) + | function citus_jsonb_concatenate(jsonb,jsonb) + | function citus_jsonb_concatenate_final(jsonb) + | function citus_node_capacity_1(integer) + | function citus_prepare_pg_upgrade() + | function citus_query_stats() + | function citus_relation_size(regclass) + | function citus_server_id() + | function citus_set_default_rebalance_strategy(text) + | function citus_shard_allowed_on_node_true(bigint,integer) + | function citus_shard_cost_1(bigint) + | function citus_shard_cost_by_disk_size(bigint) + | function citus_stat_statements() + | function 
citus_stat_statements_reset() + | function citus_table_is_visible(oid) + | function citus_table_size(regclass) + | function citus_text_send_as_jsonb(text) + | function citus_total_relation_size(regclass) + | function citus_truncate_trigger() + | function citus_validate_rebalance_strategy_functions(regproc,regproc,regproc) + | function citus_version() + | function citus_worker_stat_activity() + | function column_name_to_column(regclass,text) + | function column_to_column_name(regclass,text) + | function coord_combine_agg(oid,cstring,anyelement) + | function coord_combine_agg_ffunc(internal,oid,cstring,anyelement) + | function coord_combine_agg_sfunc(internal,oid,cstring,anyelement) + | function create_distributed_function(regprocedure,text,text) + | function create_distributed_table(regclass,text,citus.distribution_type,text) + | function create_intermediate_result(text,text) + | function create_reference_table(regclass) + | function distributed_tables_colocated(regclass,regclass) + | function dump_global_wait_edges() + | function dump_local_wait_edges() + | function fetch_intermediate_results(text[],text,integer) + | function get_all_active_transactions() + | function get_colocated_shard_array(bigint) + | function get_colocated_table_array(regclass) + | function get_current_transaction_id() + | function get_global_active_transactions() + | function get_rebalance_progress() + | function get_rebalance_table_shards_plan(regclass,real,integer,bigint[],boolean,name) + | function get_shard_id_for_distribution_column(regclass,"any") + | function isolate_tenant_to_new_shard(regclass,"any",text) + | function json_cat_agg(json) + | function jsonb_cat_agg(jsonb) + | function lock_relation_if_exists(text,text) + | function lock_shard_metadata(integer,bigint[]) + | function lock_shard_resources(integer,bigint[]) + | function mark_tables_colocated(regclass,regclass[]) + | function master_activate_node(text,integer) + | function 
master_add_inactive_node(text,integer,integer,noderole,name) + | function master_add_node(text,integer,integer,noderole,name) + | function master_add_secondary_node(text,integer,text,integer,name) + | function master_append_table_to_shard(bigint,text,text,integer) + | function master_apply_delete_command(text) + | function master_conninfo_cache_invalidate() + | function master_copy_shard_placement(bigint,text,integer,text,integer,boolean,citus.shard_transfer_mode) + | function master_create_distributed_table(regclass,text,citus.distribution_type) + | function master_create_empty_shard(text) + | function master_create_worker_shards(text,integer,integer) + | function master_disable_node(text,integer) + | function master_dist_local_group_cache_invalidate() + | function master_dist_node_cache_invalidate() + | function master_dist_object_cache_invalidate() + | function master_dist_partition_cache_invalidate() + | function master_dist_placement_cache_invalidate() + | function master_dist_shard_cache_invalidate() + | function master_drain_node(text,integer,citus.shard_transfer_mode,name) + | function master_drop_all_shards(regclass,text,text) + | function master_drop_sequences(text[]) + | function master_get_active_worker_nodes() + | function master_get_new_placementid() + | function master_get_new_shardid() + | function master_get_table_ddl_events(text) + | function master_get_table_metadata(text) + | function master_modify_multiple_shards(text) + | function master_move_shard_placement(bigint,text,integer,text,integer,citus.shard_transfer_mode) + | function master_remove_distributed_table_metadata_from_workers(regclass,text,text) + | function master_remove_node(text,integer) + | function master_remove_partition_metadata(regclass,text,text) + | function master_run_on_worker(text[],integer[],text[],boolean) + | function master_set_node_property(text,integer,text,boolean) + | function master_unmark_object_distributed(oid,oid,integer) + | function 
master_update_node(integer,text,integer,boolean,integer) + | function master_update_shard_statistics(bigint) + | function master_update_table_statistics(regclass) + | function poolinfo_valid(text) + | function read_intermediate_result(text,citus_copy_format) + | function read_intermediate_results(text[],citus_copy_format) + | function rebalance_table_shards(regclass,real,integer,bigint[],citus.shard_transfer_mode,boolean,name) + | function recover_prepared_transactions() + | function relation_is_a_known_shard(regclass) + | function replicate_table_shards(regclass,integer,integer,bigint[],citus.shard_transfer_mode) + | function role_exists(name) + | function run_command_on_colocated_placements(regclass,regclass,text,boolean) + | function run_command_on_placements(regclass,text,boolean) + | function run_command_on_shards(regclass,text,boolean) + | function run_command_on_workers(text,boolean) + | function shard_name(regclass,bigint) + | function start_metadata_sync_to_node(text,integer) + | function stop_metadata_sync_to_node(text,integer) + | function task_tracker_assign_task(bigint,integer,text) + | function task_tracker_cleanup_job(bigint) + | function task_tracker_conninfo_cache_invalidate() + | function task_tracker_task_status(bigint,integer) + | function upgrade_to_reference_table(regclass) + | function worker_append_table_to_shard(text,text,text,integer) + | function worker_apply_inter_shard_ddl_command(bigint,text,bigint,text,text) + | function worker_apply_sequence_command(text) + | function worker_apply_sequence_command(text,regtype) + | function worker_apply_shard_ddl_command(bigint,text) + | function worker_apply_shard_ddl_command(bigint,text,text) + | function worker_cleanup_job_schema_cache() + | function worker_create_or_replace_object(text) + | function worker_create_schema(bigint,text) + | function worker_create_truncate_trigger(regclass) + | function worker_drop_distributed_table(text) + | function 
worker_execute_sql_task(bigint,integer,text,boolean) + | function worker_fetch_foreign_file(text,text,bigint,text[],integer[]) + | function worker_fetch_partition_file(bigint,integer,integer,integer,text,integer) + | function worker_hash("any") + | function worker_hash_partition_table(bigint,integer,text,text,oid,anyarray) + | function worker_merge_files_and_run_query(bigint,integer,text,text) + | function worker_merge_files_into_table(bigint,integer,text[],text[]) + | function worker_partial_agg(oid,anyelement) + | function worker_partial_agg_ffunc(internal) + | function worker_partial_agg_sfunc(internal,oid,anyelement) + | function worker_partition_query_result(text,text,integer,citus.distribution_type,text[],text[],boolean) + | function worker_range_partition_table(bigint,integer,text,text,oid,anyarray) + | function worker_repartition_cleanup(bigint) + | schema citus + | schema citus_internal + | sequence pg_dist_colocationid_seq + | sequence pg_dist_groupid_seq + | sequence pg_dist_node_nodeid_seq + | sequence pg_dist_placement_placementid_seq + | sequence pg_dist_shardid_seq + | table citus.pg_dist_object + | table pg_dist_authinfo + | table pg_dist_colocation + | table pg_dist_local_group + | table pg_dist_node + | table pg_dist_node_metadata + | table pg_dist_partition + | table pg_dist_placement + | table pg_dist_poolinfo + | table pg_dist_rebalance_strategy + | table pg_dist_shard + | table pg_dist_transaction + | type citus.distribution_type + | type citus.shard_transfer_mode + | type citus_copy_format + | type noderole + | view citus_dist_stat_activity + | view citus_lock_waits + | view citus_shard_indexes_on_worker + | view citus_shards_on_worker + | view citus_stat_statements + | view citus_worker_stat_activity + | view pg_dist_shard_placement +(188 rows) + +-- Test downgrade to 9.2-2 from 9.2-4 +ALTER EXTENSION citus UPDATE TO '9.2-4'; +ALTER EXTENSION citus UPDATE TO '9.2-2'; +-- Should be empty result since upgrade+downgrade should be a no-op 
+SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +/* + * As we mistakenly bumped schema version to 9.3-1 in a bad release, we support + * updating citus schema from 9.3-1 to 9.2-4, but we do not support updates to 9.3-1. + * + * Hence the query below should fail. + */ +ALTER EXTENSION citus UPDATE TO '9.3-1'; +ERROR: extension "citus" has no update path from version "9.2-2" to version "9.3-1" +ALTER EXTENSION citus UPDATE TO '9.2-4'; +-- Snapshot of state at 9.2-4 +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Test downgrade to 9.2-4 from 9.3-2 +ALTER EXTENSION citus UPDATE TO '9.3-2'; +ALTER EXTENSION citus UPDATE TO '9.2-4'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 9.3-2 +ALTER EXTENSION citus UPDATE TO '9.3-2'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + | function citus_remote_connection_stats() + | function replicate_reference_tables() + | function truncate_local_data_after_distributing_table(regclass) + | function update_distributed_table_colocation(regclass,text) + | function worker_create_or_alter_role(text,text,text) +(5 rows) + +-- Test downgrade to 9.3-2 from 9.4-1 +ALTER EXTENSION citus UPDATE TO '9.4-1'; +ALTER EXTENSION citus UPDATE TO '9.3-2'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 9.4-1 +ALTER 
EXTENSION citus UPDATE TO '9.4-1'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + | function worker_last_saved_explain_analyze() + | function worker_save_query_explain_analyze(text,jsonb) +(2 rows) + +-- Test downgrade to 9.4-1 from 9.5-1 +ALTER EXTENSION citus UPDATE TO '9.5-1'; +BEGIN; + SELECT master_add_node('localhost', :master_port, groupId=>0); + master_add_node +--------------------------------------------------------------------- + 1 +(1 row) + + CREATE TABLE citus_local_table (a int); + SELECT create_citus_local_table('citus_local_table'); +NOTICE: create_citus_local_table is deprecated in favour of citus_add_local_table_to_metadata + create_citus_local_table +--------------------------------------------------------------------- + +(1 row) + + -- downgrade from 9.5-1 to 9.4-1 should fail as we have a citus local table + ALTER EXTENSION citus UPDATE TO '9.4-1'; +ERROR: citus local tables are introduced in Citus 9.5 +HINT: To downgrade Citus to an older version, you should first convert each citus local table to a postgres table by executing SELECT undistribute_table("%s") +CONTEXT: PL/pgSQL function inline_code_block line 11 at RAISE +ROLLBACK; +-- now we can downgrade as there is no citus local table +ALTER EXTENSION citus UPDATE TO '9.4-1'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 9.5-1 +ALTER EXTENSION citus UPDATE TO '9.5-1'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + function master_drop_sequences(text[]) | + function task_tracker_assign_task(bigint,integer,text) | + function task_tracker_cleanup_job(bigint) | + function 
task_tracker_conninfo_cache_invalidate() | + function task_tracker_task_status(bigint,integer) | + function worker_execute_sql_task(bigint,integer,text,boolean) | + function worker_merge_files_and_run_query(bigint,integer,text,text) | + | function create_citus_local_table(regclass) + | function undistribute_table(regclass) + | function worker_record_sequence_dependency(regclass,regclass,name) +(10 rows) + +-- Test downgrade to 9.5-1 from 10.0-1 +ALTER EXTENSION citus UPDATE TO '10.0-1'; +ALTER EXTENSION citus UPDATE TO '9.5-1'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 10.0-1 +ALTER EXTENSION citus UPDATE TO '10.0-1'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + function citus_total_relation_size(regclass) | + function create_citus_local_table(regclass) | + function mark_tables_colocated(regclass,regclass[]) | + function master_conninfo_cache_invalidate() | + function master_create_distributed_table(regclass,text,citus.distribution_type) | + function master_create_worker_shards(text,integer,integer) | + function master_dist_local_group_cache_invalidate() | + function master_dist_node_cache_invalidate() | + function master_dist_object_cache_invalidate() | + function master_dist_partition_cache_invalidate() | + function master_dist_placement_cache_invalidate() | + function master_dist_shard_cache_invalidate() | + function master_drop_all_shards(regclass,text,text) | + function master_modify_multiple_shards(text) | + function undistribute_table(regclass) | + function upgrade_to_reference_table(regclass) | + | access method columnar + | function alter_columnar_table_reset(regclass,boolean,boolean,boolean,boolean) + | function 
alter_columnar_table_set(regclass,integer,integer,name,integer) + | function alter_distributed_table(regclass,text,integer,text,boolean) + | function alter_old_partitions_set_access_method(regclass,timestamp with time zone,name) + | function alter_table_set_access_method(regclass,text) + | function citus_activate_node(text,integer) + | function citus_add_inactive_node(text,integer,integer,noderole,name) + | function citus_add_local_table_to_metadata(regclass,boolean) + | function citus_add_node(text,integer,integer,noderole,name) + | function citus_add_secondary_node(text,integer,text,integer,name) + | function citus_conninfo_cache_invalidate() + | function citus_copy_shard_placement(bigint,text,integer,text,integer,boolean,citus.shard_transfer_mode) + | function citus_disable_node(text,integer) + | function citus_dist_local_group_cache_invalidate() + | function citus_dist_node_cache_invalidate() + | function citus_dist_object_cache_invalidate() + | function citus_dist_partition_cache_invalidate() + | function citus_dist_placement_cache_invalidate() + | function citus_dist_shard_cache_invalidate() + | function citus_drain_node(text,integer,citus.shard_transfer_mode,name) + | function citus_drop_all_shards(regclass,text,text) + | function citus_internal.columnar_ensure_objects_exist() + | function citus_move_shard_placement(bigint,text,integer,text,integer,citus.shard_transfer_mode) + | function citus_remove_node(text,integer) + | function citus_set_coordinator_host(text,integer,noderole,name) + | function citus_set_node_property(text,integer,text,boolean) + | function citus_shard_sizes() + | function citus_total_relation_size(regclass,boolean) + | function citus_unmark_object_distributed(oid,oid,integer) + | function citus_update_node(integer,text,integer,boolean,integer) + | function citus_update_shard_statistics(bigint) + | function citus_update_table_statistics(regclass) + | function columnar.columnar_handler(internal) + | function 
fix_pre_citus10_partitioned_table_constraint_names() + | function fix_pre_citus10_partitioned_table_constraint_names(regclass) + | function notify_constraint_dropped() + | function remove_local_tables_from_metadata() + | function time_partition_range(regclass) + | function undistribute_table(regclass,boolean) + | function worker_change_sequence_dependency(regclass,regclass,regclass) + | function worker_fix_pre_citus10_partitioned_table_constraint_names(regclass,bigint,text) + | schema columnar + | sequence columnar.storageid_seq + | table columnar.chunk + | table columnar.chunk_group + | table columnar.options + | table columnar.stripe + | view citus_shards + | view citus_tables + | view time_partitions +(67 rows) + +-- Test downgrade to 10.0-1 from 10.0-2 +ALTER EXTENSION citus UPDATE TO '10.0-2'; +ALTER EXTENSION citus UPDATE TO '10.0-1'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 10.0-2 +ALTER EXTENSION citus UPDATE TO '10.0-2'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Test downgrade to 10.0-2 from 10.0-3 +ALTER EXTENSION citus UPDATE TO '10.0-3'; +ALTER EXTENSION citus UPDATE TO '10.0-2'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 10.0-3 +ALTER EXTENSION citus UPDATE TO '10.0-3'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + | function citus_get_active_worker_nodes() +(1 row) + +-- Test downgrade to 10.0-3 from 10.1-1 +ALTER 
EXTENSION citus UPDATE TO '10.1-1'; +ALTER EXTENSION citus UPDATE TO '10.0-3'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- +(0 rows) + +-- Snapshot of state at 10.1-1 +ALTER EXTENSION citus UPDATE TO '10.1-1'; +SELECT * FROM print_extension_changes(); + previous_object | current_object +--------------------------------------------------------------------- + function create_distributed_table(regclass,text,citus.distribution_type,text) | + | function create_distributed_table(regclass,text,citus.distribution_type,text,integer) +(2 rows) + +DROP TABLE prev_objects, extension_diff; +-- show running version +SHOW citus.version; + citus.version +--------------------------------------------------------------------- + 10.1devel +(1 row) + +-- ensure no unexpected objects were created outside pg_catalog +SELECT pgio.type, pgio.identity +FROM pg_depend AS pgd, + pg_extension AS pge, + LATERAL pg_identify_object(pgd.classid, pgd.objid, pgd.objsubid) AS pgio +WHERE pgd.refclassid = 'pg_extension'::regclass AND + pgd.refobjid = pge.oid AND + pge.extname = 'citus' AND + pgio.schema NOT IN ('pg_catalog', 'citus', 'citus_internal', 'test', 'columnar') +ORDER BY 1, 2; + type | identity +--------------------------------------------------------------------- + view | public.citus_tables +(1 row) + +-- see incompatible version errors out +RESET citus.enable_version_checks; +DROP EXTENSION citus; +CREATE EXTENSION citus VERSION '8.0-1'; +ERROR: specified version incompatible with loaded Citus library +DETAIL: Loaded library requires 10.1, but 8.0-1 was specified. +HINT: If a newer library is present, restart the database and try the command again. 
+-- Test non-distributed queries work even in version mismatch +SET citus.enable_version_checks TO 'false'; +CREATE EXTENSION citus VERSION '8.1-1'; +SET citus.enable_version_checks TO 'true'; +-- Test CREATE TABLE +CREATE TABLE version_mismatch_table(column1 int); +-- Test COPY +\copy version_mismatch_table FROM STDIN; +-- Test INSERT +INSERT INTO version_mismatch_table(column1) VALUES(5); +-- Test SELECT +SELECT * FROM version_mismatch_table ORDER BY column1; + column1 +--------------------------------------------------------------------- + 0 + 1 + 2 + 3 + 4 + 5 +(6 rows) + +-- Test SELECT from pg_catalog +SELECT d.datname as "Name", + pg_catalog.pg_get_userbyid(d.datdba) as "Owner", + pg_catalog.array_to_string(d.datacl, E'\n') AS "Access privileges" +FROM pg_catalog.pg_database d +ORDER BY 1; + Name | Owner | Access privileges +--------------------------------------------------------------------- + postgres | postgres | + regression | postgres | + template0 | postgres | =c/postgres + + | | postgres=CTc/postgres + template1 | postgres | =c/postgres + + | | postgres=CTc/postgres +(4 rows) + +-- We should not distribute table in version mistmatch +SELECT create_distributed_table('version_mismatch_table', 'column1'); +ERROR: loaded Citus library version differs from installed extension version +DETAIL: Loaded library requires 10.1, but the installed extension version is 8.1-1. +HINT: Run ALTER EXTENSION citus UPDATE and try again. +-- This function will cause fail in next ALTER EXTENSION +CREATE OR REPLACE FUNCTION pg_catalog.relation_is_a_known_shard(regclass) +RETURNS void LANGUAGE plpgsql +AS $function$ +BEGIN +END; +$function$; +ERROR: cannot change return type of existing function +HINT: Use DROP FUNCTION relation_is_a_known_shard(regclass) first. 
+SET citus.enable_version_checks TO 'false'; +-- This will fail because of previous function declaration +ALTER EXTENSION citus UPDATE TO '8.1-1'; +NOTICE: version "8.1-1" of extension "citus" is already installed +-- We can DROP problematic function and continue ALTER EXTENSION even when version checks are on +SET citus.enable_version_checks TO 'true'; +DROP FUNCTION pg_catalog.relation_is_a_known_shard(regclass); +ERROR: cannot drop function relation_is_a_known_shard(regclass) because extension citus requires it +HINT: You can drop extension citus instead. +SET citus.enable_version_checks TO 'false'; +ALTER EXTENSION citus UPDATE TO '8.1-1'; +NOTICE: version "8.1-1" of extension "citus" is already installed +-- Test updating to the latest version without specifying the version number +ALTER EXTENSION citus UPDATE; +-- re-create in newest version +DROP EXTENSION citus; +\c +CREATE EXTENSION citus; +-- test cache invalidation in workers +\c - - - :worker_1_port +DROP EXTENSION citus; +SET citus.enable_version_checks TO 'false'; +CREATE EXTENSION citus VERSION '8.0-1'; +SET citus.enable_version_checks TO 'true'; +-- during ALTER EXTENSION, we should invalidate the cache +ALTER EXTENSION citus UPDATE; +-- if cache is invalidated succesfull, this \d should work without any problem +\d + List of relations + Schema | Name | Type | Owner +--------------------------------------------------------------------- + public | citus_tables | view | postgres +(1 row) + +\c - - - :master_port +-- test https://github.com/citusdata/citus/issues/3409 +CREATE USER testuser2 SUPERUSER; +NOTICE: not propagating CREATE ROLE/USER commands to worker nodes +HINT: Connect to worker nodes directly to manually create all necessary users and roles. +SET ROLE testuser2; +DROP EXTENSION Citus; +-- Loop until we see there's no maintenance daemon running +DO $$begin + for i in 0 .. 
100 loop + if i = 100 then raise 'Waited too long'; end if; + PERFORM pg_stat_clear_snapshot(); + perform * from pg_stat_activity where application_name = 'Citus Maintenance Daemon'; + if not found then exit; end if; + perform pg_sleep(0.1); + end loop; +end$$; +SELECT datid, datname, usename FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + datid | datname | usename +--------------------------------------------------------------------- +(0 rows) + +CREATE EXTENSION Citus; +-- Loop until we there's a maintenance daemon running +DO $$begin + for i in 0 .. 100 loop + if i = 100 then raise 'Waited too long'; end if; + PERFORM pg_stat_clear_snapshot(); + perform * from pg_stat_activity where application_name = 'Citus Maintenance Daemon'; + if found then exit; end if; + perform pg_sleep(0.1); + end loop; +end$$; +SELECT datid, datname, usename FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + datid | datname | usename +--------------------------------------------------------------------- + 16384 | regression | testuser2 +(1 row) + +RESET ROLE; +-- check that maintenance daemon gets (re-)started for the right user +DROP EXTENSION citus; +CREATE USER testuser SUPERUSER; +SET ROLE testuser; +CREATE EXTENSION citus; +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + datname | current_database | usename | extowner +--------------------------------------------------------------------- + regression | regression | testuser | testuser +(1 row) + +-- and recreate as the right owner +RESET ROLE; +DROP EXTENSION citus; +CREATE EXTENSION citus; +-- Check that maintenance daemon can also be started in another database +CREATE DATABASE another; +NOTICE: Citus partially supports CREATE DATABASE for distributed databases +DETAIL: Citus does not propagate CREATE DATABASE command to workers +HINT: You can manually create a 
database and its extensions on workers. +\c another +CREATE EXTENSION citus; +CREATE SCHEMA test; +:create_function_test_maintenance_worker +-- see that the daemon started +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + datname | current_database | usename | extowner +--------------------------------------------------------------------- + another | another | postgres | postgres +(1 row) + +-- Test that database with active worker can be dropped. +\c regression +CREATE SCHEMA test_daemon; +-- we create a similar function on the regression database +-- note that this function checks for the existence of the daemon +-- when not found, returns true else tries for 5 times and +-- returns false +CREATE OR REPLACE FUNCTION test_daemon.maintenance_daemon_died(p_dbname text) + RETURNS boolean + LANGUAGE plpgsql +AS $$ +DECLARE + activity record; +BEGIN + PERFORM pg_stat_clear_snapshot(); + SELECT * INTO activity FROM pg_stat_activity + WHERE application_name = 'Citus Maintenance Daemon' AND datname = p_dbname; + IF activity.pid IS NULL THEN + RETURN true; + ELSE + RETURN false; + END IF; +END; +$$; +-- drop the database and see that the daemon is dead +DROP DATABASE another; +SELECT + * +FROM + test_daemon.maintenance_daemon_died('another'); + maintenance_daemon_died +--------------------------------------------------------------------- + t +(1 row) + +-- we don't need the schema and the function anymore +DROP SCHEMA test_daemon CASCADE; +NOTICE: drop cascades to function test_daemon.maintenance_daemon_died(text) +-- verify citus does not crash while creating a table when run against an older worker +-- create_distributed_table piggybacks multiple commands into single one, if one worker +-- did not have the required UDF it should fail instead of crash. 
+-- create a test database, configure citus with single node +CREATE DATABASE another; +NOTICE: Citus partially supports CREATE DATABASE for distributed databases +DETAIL: Citus does not propagate CREATE DATABASE command to workers +HINT: You can manually create a database and its extensions on workers. +\c - - - :worker_1_port +CREATE DATABASE another; +NOTICE: Citus partially supports CREATE DATABASE for distributed databases +DETAIL: Citus does not propagate CREATE DATABASE command to workers +HINT: You can manually create a database and its extensions on workers. +\c - - - :master_port +\c another +CREATE EXTENSION citus; +SET citus.enable_object_propagation TO off; -- prevent distributed transactions during add node +SELECT FROM master_add_node('localhost', :worker_1_port); +WARNING: citus.enable_object_propagation is off, not creating distributed objects on worker +DETAIL: distributed objects are only kept in sync when citus.enable_object_propagation is set to on. Newly activated nodes will not get these objects created +-- +(1 row) + +\c - - - :worker_1_port +CREATE EXTENSION citus; +ALTER FUNCTION assign_distributed_transaction_id(initiator_node_identifier integer, transaction_number bigint, transaction_stamp timestamp with time zone) +RENAME TO dummy_assign_function; +\c - - - :master_port +SET citus.shard_replication_factor to 1; +-- create_distributed_table command should fail +CREATE TABLE t1(a int, b int); +SET client_min_messages TO ERROR; +DO $$ +BEGIN + BEGIN + SELECT create_distributed_table('t1', 'a'); + EXCEPTION WHEN OTHERS THEN + RAISE 'create distributed table failed'; + END; +END; +$$; +ERROR: create distributed table failed +CONTEXT: PL/pgSQL function inline_code_block line 6 at RAISE +\c regression +\c - - - :master_port +DROP DATABASE another; +\c - - - :worker_1_port +DROP DATABASE another; +\c - - - :master_port +-- only the regression database should have a maintenance daemon +SELECT count(*) FROM pg_stat_activity WHERE application_name 
= 'Citus Maintenance Daemon'; + count +--------------------------------------------------------------------- + 1 +(1 row) + +-- recreate the extension immediately after the maintenancae daemon errors +SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + pg_cancel_backend +--------------------------------------------------------------------- + t +(1 row) + +DROP EXTENSION citus; +CREATE EXTENSION citus; +-- wait for maintenance daemon restart +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + datname | current_database | usename | extowner +--------------------------------------------------------------------- + regression | regression | postgres | postgres +(1 row) + +-- confirm that there is only one maintenance daemon +SELECT count(*) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + count +--------------------------------------------------------------------- + 1 +(1 row) + +-- kill the maintenance daemon +SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + pg_cancel_backend +--------------------------------------------------------------------- + t +(1 row) + +-- reconnect +\c - - - :master_port +-- run something that goes through planner hook and therefore kicks of maintenance daemon +SELECT 1; + ?column? 
+--------------------------------------------------------------------- + 1 +(1 row) + +-- wait for maintenance daemon restart +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + datname | current_database | usename | extowner +--------------------------------------------------------------------- + regression | regression | postgres | postgres +(1 row) + +-- confirm that there is only one maintenance daemon +SELECT count(*) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + count +--------------------------------------------------------------------- + 1 +(1 row) + +DROP TABLE version_mismatch_table; diff --git a/packaging_automation/tests/projects/citus/src/test/regress/sql/multi_extension.sql b/packaging_automation/tests/projects/citus/src/test/regress/sql/multi_extension.sql new file mode 100644 index 00000000..20e0e1df --- /dev/null +++ b/packaging_automation/tests/projects/citus/src/test/regress/sql/multi_extension.sql @@ -0,0 +1,498 @@ +-- +-- MULTI_EXTENSION +-- +-- Tests around extension creation / upgrades +-- +-- It'd be nice to script generation of this file, but alas, that's +-- not done yet. + +-- differentiate the output file for pg11 and versions above, with regards to objects +-- created per citus version depending on the postgres version. Upgrade tests verify the +-- objects are added in citus_finish_pg_upgrade() +SHOW server_version \gset +SELECT substring(:'server_version', '\d+')::int > 11 AS version_above_eleven; + +SET citus.next_shard_id TO 580000; + +SELECT $definition$ +CREATE OR REPLACE FUNCTION test.maintenance_worker() + RETURNS pg_stat_activity + LANGUAGE plpgsql +AS $$ +DECLARE + activity record; +BEGIN + DO 'BEGIN END'; -- Force maintenance daemon to start + -- we don't want to wait forever; loop will exit after 20 seconds + FOR i IN 1 .. 
200 LOOP + PERFORM pg_stat_clear_snapshot(); + SELECT * INTO activity FROM pg_stat_activity + WHERE application_name = 'Citus Maintenance Daemon' AND datname = current_database(); + IF activity.pid IS NOT NULL THEN + RETURN activity; + ELSE + PERFORM pg_sleep(0.1); + END IF ; + END LOOP; + -- fail if we reach the end of this loop + raise 'Waited too long for maintenance daemon to start'; +END; +$$; +$definition$ create_function_test_maintenance_worker +\gset + +CREATE TABLE prev_objects(description text); +CREATE TABLE extension_diff(previous_object text COLLATE "C", + current_object text COLLATE "C"); + +CREATE FUNCTION print_extension_changes() +RETURNS TABLE(previous_object text, current_object text) +AS $func$ +BEGIN + TRUNCATE TABLE extension_diff; + + CREATE TABLE current_objects AS + SELECT pg_catalog.pg_describe_object(classid, objid, 0) AS description + FROM pg_catalog.pg_depend, pg_catalog.pg_extension e + WHERE refclassid = 'pg_catalog.pg_extension'::pg_catalog.regclass + AND refobjid = e.oid + AND deptype = 'e' + AND e.extname='citus'; + + INSERT INTO extension_diff + SELECT p.description previous_object, c.description current_object + FROM current_objects c FULL JOIN prev_objects p + ON p.description = c.description + WHERE p.description is null OR c.description is null; + + DROP TABLE prev_objects; + ALTER TABLE current_objects RENAME TO prev_objects; + + RETURN QUERY SELECT * FROM extension_diff ORDER BY 1, 2; +END +$func$ LANGUAGE plpgsql; + +CREATE SCHEMA test; +:create_function_test_maintenance_worker + +-- check maintenance daemon is started +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + +-- ensure no unexpected objects were created outside pg_catalog +SELECT pgio.type, pgio.identity +FROM pg_depend AS pgd, + pg_extension AS pge, + LATERAL pg_identify_object(pgd.classid, pgd.objid, pgd.objsubid) AS pgio +WHERE pgd.refclassid = 
'pg_extension'::regclass AND + pgd.refobjid = pge.oid AND + pge.extname = 'citus' AND + pgio.schema NOT IN ('pg_catalog', 'citus', 'citus_internal', 'test', 'columnar') +ORDER BY 1, 2; + + +-- DROP EXTENSION pre-created by the regression suite +DROP EXTENSION citus; +\c + +-- these tests switch between citus versions and call ddl's that require pg_dist_object to be created +SET citus.enable_object_propagation TO 'false'; + +SET citus.enable_version_checks TO 'false'; + +CREATE EXTENSION citus VERSION '8.0-1'; +ALTER EXTENSION citus UPDATE TO '8.0-2'; +ALTER EXTENSION citus UPDATE TO '8.0-3'; +ALTER EXTENSION citus UPDATE TO '8.0-4'; +ALTER EXTENSION citus UPDATE TO '8.0-5'; +ALTER EXTENSION citus UPDATE TO '8.0-6'; +ALTER EXTENSION citus UPDATE TO '8.0-7'; +ALTER EXTENSION citus UPDATE TO '8.0-8'; +ALTER EXTENSION citus UPDATE TO '8.0-9'; +ALTER EXTENSION citus UPDATE TO '8.0-10'; +ALTER EXTENSION citus UPDATE TO '8.0-11'; +ALTER EXTENSION citus UPDATE TO '8.0-12'; +ALTER EXTENSION citus UPDATE TO '8.0-13'; +ALTER EXTENSION citus UPDATE TO '8.1-1'; +ALTER EXTENSION citus UPDATE TO '8.2-1'; +ALTER EXTENSION citus UPDATE TO '8.2-2'; +ALTER EXTENSION citus UPDATE TO '8.2-3'; +ALTER EXTENSION citus UPDATE TO '8.2-4'; +ALTER EXTENSION citus UPDATE TO '8.3-1'; +ALTER EXTENSION citus UPDATE TO '9.0-1'; +ALTER EXTENSION citus UPDATE TO '9.0-2'; +ALTER EXTENSION citus UPDATE TO '9.1-1'; +ALTER EXTENSION citus UPDATE TO '9.2-1'; +ALTER EXTENSION citus UPDATE TO '9.2-2'; +-- Snapshot of state at 9.2-2 +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 9.2-2 from 9.2-4 +ALTER EXTENSION citus UPDATE TO '9.2-4'; +ALTER EXTENSION citus UPDATE TO '9.2-2'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +/* + * As we mistakenly bumped schema version to 9.3-1 in a bad release, we support + * updating citus schema from 9.3-1 to 9.2-4, but we do not support updates to 9.3-1. 
+ * + * Hence the query below should fail. + */ +ALTER EXTENSION citus UPDATE TO '9.3-1'; + +ALTER EXTENSION citus UPDATE TO '9.2-4'; +-- Snapshot of state at 9.2-4 +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 9.2-4 from 9.3-2 +ALTER EXTENSION citus UPDATE TO '9.3-2'; +ALTER EXTENSION citus UPDATE TO '9.2-4'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 9.3-2 +ALTER EXTENSION citus UPDATE TO '9.3-2'; +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 9.3-2 from 9.4-1 +ALTER EXTENSION citus UPDATE TO '9.4-1'; +ALTER EXTENSION citus UPDATE TO '9.3-2'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 9.4-1 +ALTER EXTENSION citus UPDATE TO '9.4-1'; +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 9.4-1 from 9.5-1 +ALTER EXTENSION citus UPDATE TO '9.5-1'; + +BEGIN; + SELECT master_add_node('localhost', :master_port, groupId=>0); + CREATE TABLE citus_local_table (a int); + SELECT create_citus_local_table('citus_local_table'); + + -- downgrade from 9.5-1 to 9.4-1 should fail as we have a citus local table + ALTER EXTENSION citus UPDATE TO '9.4-1'; +ROLLBACK; + +-- now we can downgrade as there is no citus local table +ALTER EXTENSION citus UPDATE TO '9.4-1'; + +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 9.5-1 +ALTER EXTENSION citus UPDATE TO '9.5-1'; +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 9.5-1 from 10.0-1 +ALTER EXTENSION citus UPDATE TO '10.0-1'; +ALTER EXTENSION citus UPDATE TO '9.5-1'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 10.0-1 +ALTER EXTENSION citus UPDATE TO '10.0-1'; +SELECT * FROM print_extension_changes(); + +-- 
Test downgrade to 10.0-1 from 10.0-2 +ALTER EXTENSION citus UPDATE TO '10.0-2'; +ALTER EXTENSION citus UPDATE TO '10.0-1'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 10.0-2 +ALTER EXTENSION citus UPDATE TO '10.0-2'; +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 10.0-2 from 10.0-3 +ALTER EXTENSION citus UPDATE TO '10.0-3'; +ALTER EXTENSION citus UPDATE TO '10.0-2'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 10.0-3 +ALTER EXTENSION citus UPDATE TO '10.0-3'; +SELECT * FROM print_extension_changes(); + +-- Test downgrade to 10.0-3 from 10.1-1 +ALTER EXTENSION citus UPDATE TO '10.1-1'; +ALTER EXTENSION citus UPDATE TO '10.0-3'; +-- Should be empty result since upgrade+downgrade should be a no-op +SELECT * FROM print_extension_changes(); + +-- Snapshot of state at 10.1-1 +ALTER EXTENSION citus UPDATE TO '10.1-1'; +SELECT * FROM print_extension_changes(); + +DROP TABLE prev_objects, extension_diff; + +-- show running version +SHOW citus.version; + +-- ensure no unexpected objects were created outside pg_catalog +SELECT pgio.type, pgio.identity +FROM pg_depend AS pgd, + pg_extension AS pge, + LATERAL pg_identify_object(pgd.classid, pgd.objid, pgd.objsubid) AS pgio +WHERE pgd.refclassid = 'pg_extension'::regclass AND + pgd.refobjid = pge.oid AND + pge.extname = 'citus' AND + pgio.schema NOT IN ('pg_catalog', 'citus', 'citus_internal', 'test', 'columnar') +ORDER BY 1, 2; + +-- see incompatible version errors out +RESET citus.enable_version_checks; +DROP EXTENSION citus; +CREATE EXTENSION citus VERSION '8.0-1'; + +-- Test non-distributed queries work even in version mismatch +SET citus.enable_version_checks TO 'false'; +CREATE EXTENSION citus VERSION '8.1-1'; +SET citus.enable_version_checks TO 'true'; + +-- Test CREATE TABLE +CREATE TABLE version_mismatch_table(column1 
int); + +-- Test COPY +\copy version_mismatch_table FROM STDIN; +0 +1 +2 +3 +4 +\. + +-- Test INSERT +INSERT INTO version_mismatch_table(column1) VALUES(5); + +-- Test SELECT +SELECT * FROM version_mismatch_table ORDER BY column1; + +-- Test SELECT from pg_catalog +SELECT d.datname as "Name", + pg_catalog.pg_get_userbyid(d.datdba) as "Owner", + pg_catalog.array_to_string(d.datacl, E'\n') AS "Access privileges" +FROM pg_catalog.pg_database d +ORDER BY 1; + +-- We should not distribute table in version mistmatch +SELECT create_distributed_table('version_mismatch_table', 'column1'); + +-- This function will cause fail in next ALTER EXTENSION +CREATE OR REPLACE FUNCTION pg_catalog.relation_is_a_known_shard(regclass) +RETURNS void LANGUAGE plpgsql +AS $function$ +BEGIN +END; +$function$; + +SET citus.enable_version_checks TO 'false'; +-- This will fail because of previous function declaration +ALTER EXTENSION citus UPDATE TO '8.1-1'; + +-- We can DROP problematic function and continue ALTER EXTENSION even when version checks are on +SET citus.enable_version_checks TO 'true'; +DROP FUNCTION pg_catalog.relation_is_a_known_shard(regclass); + +SET citus.enable_version_checks TO 'false'; +ALTER EXTENSION citus UPDATE TO '8.1-1'; + +-- Test updating to the latest version without specifying the version number +ALTER EXTENSION citus UPDATE; + +-- re-create in newest version +DROP EXTENSION citus; +\c +CREATE EXTENSION citus; + +-- test cache invalidation in workers +\c - - - :worker_1_port + +DROP EXTENSION citus; +SET citus.enable_version_checks TO 'false'; +CREATE EXTENSION citus VERSION '8.0-1'; +SET citus.enable_version_checks TO 'true'; +-- during ALTER EXTENSION, we should invalidate the cache +ALTER EXTENSION citus UPDATE; + +-- if cache is invalidated succesfull, this \d should work without any problem +\d + +\c - - - :master_port + +-- test https://github.com/citusdata/citus/issues/3409 +CREATE USER testuser2 SUPERUSER; +SET ROLE testuser2; +DROP EXTENSION Citus; +-- 
Loop until we see there's no maintenance daemon running +DO $$begin + for i in 0 .. 100 loop + if i = 100 then raise 'Waited too long'; end if; + PERFORM pg_stat_clear_snapshot(); + perform * from pg_stat_activity where application_name = 'Citus Maintenance Daemon'; + if not found then exit; end if; + perform pg_sleep(0.1); + end loop; +end$$; +SELECT datid, datname, usename FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; +CREATE EXTENSION Citus; +-- Loop until we there's a maintenance daemon running +DO $$begin + for i in 0 .. 100 loop + if i = 100 then raise 'Waited too long'; end if; + PERFORM pg_stat_clear_snapshot(); + perform * from pg_stat_activity where application_name = 'Citus Maintenance Daemon'; + if found then exit; end if; + perform pg_sleep(0.1); + end loop; +end$$; +SELECT datid, datname, usename FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; +RESET ROLE; + +-- check that maintenance daemon gets (re-)started for the right user +DROP EXTENSION citus; +CREATE USER testuser SUPERUSER; +SET ROLE testuser; +CREATE EXTENSION citus; + +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + +-- and recreate as the right owner +RESET ROLE; +DROP EXTENSION citus; +CREATE EXTENSION citus; + + +-- Check that maintenance daemon can also be started in another database +CREATE DATABASE another; +\c another +CREATE EXTENSION citus; + +CREATE SCHEMA test; +:create_function_test_maintenance_worker + +-- see that the daemon started +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + +-- Test that database with active worker can be dropped. 
+\c regression + +CREATE SCHEMA test_daemon; + +-- we create a similar function on the regression database +-- note that this function checks for the existence of the daemon +-- when not found, returns true else tries for 5 times and +-- returns false +CREATE OR REPLACE FUNCTION test_daemon.maintenance_daemon_died(p_dbname text) + RETURNS boolean + LANGUAGE plpgsql +AS $$ +DECLARE + activity record; +BEGIN + PERFORM pg_stat_clear_snapshot(); + SELECT * INTO activity FROM pg_stat_activity + WHERE application_name = 'Citus Maintenance Daemon' AND datname = p_dbname; + IF activity.pid IS NULL THEN + RETURN true; + ELSE + RETURN false; + END IF; +END; +$$; + +-- drop the database and see that the daemon is dead +DROP DATABASE another; +SELECT + * +FROM + test_daemon.maintenance_daemon_died('another'); + +-- we don't need the schema and the function anymore +DROP SCHEMA test_daemon CASCADE; + + +-- verify citus does not crash while creating a table when run against an older worker +-- create_distributed_table piggybacks multiple commands into single one, if one worker +-- did not have the required UDF it should fail instead of crash. 
+ +-- create a test database, configure citus with single node +CREATE DATABASE another; +\c - - - :worker_1_port +CREATE DATABASE another; +\c - - - :master_port + +\c another +CREATE EXTENSION citus; +SET citus.enable_object_propagation TO off; -- prevent distributed transactions during add node +SELECT FROM master_add_node('localhost', :worker_1_port); + +\c - - - :worker_1_port +CREATE EXTENSION citus; +ALTER FUNCTION assign_distributed_transaction_id(initiator_node_identifier integer, transaction_number bigint, transaction_stamp timestamp with time zone) +RENAME TO dummy_assign_function; + +\c - - - :master_port +SET citus.shard_replication_factor to 1; +-- create_distributed_table command should fail +CREATE TABLE t1(a int, b int); +SET client_min_messages TO ERROR; +DO $$ +BEGIN + BEGIN + SELECT create_distributed_table('t1', 'a'); + EXCEPTION WHEN OTHERS THEN + RAISE 'create distributed table failed'; + END; +END; +$$; + +\c regression +\c - - - :master_port +DROP DATABASE another; + +\c - - - :worker_1_port +DROP DATABASE another; + +\c - - - :master_port +-- only the regression database should have a maintenance daemon +SELECT count(*) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + +-- recreate the extension immediately after the maintenancae daemon errors +SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; +DROP EXTENSION citus; +CREATE EXTENSION citus; + +-- wait for maintenance daemon restart +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + +-- confirm that there is only one maintenance daemon +SELECT count(*) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + +-- kill the maintenance daemon +SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + +-- reconnect +\c - - - :master_port +-- 
run something that goes through planner hook and therefore kicks of maintenance daemon +SELECT 1; + +-- wait for maintenance daemon restart +SELECT datname, current_database(), + usename, (SELECT extowner::regrole::text FROM pg_extension WHERE extname = 'citus') +FROM test.maintenance_worker(); + +-- confirm that there is only one maintenance daemon +SELECT count(*) FROM pg_stat_activity WHERE application_name = 'Citus Maintenance Daemon'; + +DROP TABLE version_mismatch_table; diff --git a/packaging_automation/tests/test.py b/packaging_automation/tests/test.py new file mode 100644 index 00000000..94fff2fe --- /dev/null +++ b/packaging_automation/tests/test.py @@ -0,0 +1,5 @@ + +import git +repo = git.Repo("/vagrant/release/tools") +commit = repo.head.commit +print(commit.message) \ No newline at end of file diff --git a/packaging_automation/tests/test_common_tool_methods.py b/packaging_automation/tests/test_common_tool_methods.py new file mode 100644 index 00000000..41e8f70d --- /dev/null +++ b/packaging_automation/tests/test_common_tool_methods.py @@ -0,0 +1,201 @@ +import os +import unittest +import uuid +from datetime import datetime +from shutil import copyfile + +import pathlib2 +from github import Github + +from ..common_tool_methods import ( + find_nth_occurrence_position, is_major_release, + str_array_to_str, run, remove_text_with_parenthesis, get_version_details, + replace_line_in_file, get_prs_for_patch_release, filter_prs_by_label, get_upcoming_minor_version, + get_project_version_from_tag_name, find_nth_matching_line_and_line_number, get_minor_version, + get_patch_version_regex, append_line_in_file, prepend_line_in_file, remote_branch_exists, get_current_branch, + local_branch_exists, get_last_commit_message) + +GITHUB_TOKEN = os.getenv("GH_TOKEN") +TEST_BASE_PATH = pathlib2.Path(__file__).parent.absolute() + + +class CommonToolMethodsTestCases(unittest.TestCase): + + def test_find_nth_occurrence_position(self): + 
self.assertEqual(find_nth_occurrence_position("foofoo foofoo", "foofoo", 2), 7) + + def test_find_nth_matching_line_number_by_regex(self): + # Two match case + self.assertEqual(find_nth_matching_line_and_line_number("citusx\n citusx\ncitusx", "^citusx$", 2)[0], 2) + # No match case + self.assertEqual(find_nth_matching_line_and_line_number("citusx\n citusx\ncitusx", "^citusy$", 2)[0], -1) + + def test_is_major_release(self): + self.assertEqual(True, is_major_release("10.0.0")) + self.assertEqual(False, is_major_release("10.0.1")) + + def test_get_project_version_from_tag_name(self): + tag_name = "v10.0.3" + self.assertEqual("10.0.3", get_project_version_from_tag_name(tag_name)) + + def test_str_array_to_str(self): + self.assertEqual("1\n2\n3\n4\n", str_array_to_str(["1", "2", "3", "4"])) + + def test_run(self): + result = run("echo 'Run' method is performing fine ") + self.assertEqual(0, result.returncode) + + + + def test_remove_paranthesis_from_string(self): + self.assertEqual("out of paranthesis ", + remove_text_with_parenthesis("out of paranthesis (inside paranthesis)")) + + def test_get_version_details(self): + self.assertEqual({"major": "10", "minor": "0", "patch": "1"}, get_version_details("10.0.1")) + + def test_replace_line_in_file(self): + replace_str = "Summary: Replace Test" + copy_file_path = f"{TEST_BASE_PATH}/files/citus_copy.spec" + copyfile(f"{TEST_BASE_PATH}/files/citus.spec", copy_file_path) + replace_line_in_file(copy_file_path, r"^Summary: *", replace_str) + try: + with open(copy_file_path, "r") as reader: + content = reader.read() + lines = content.splitlines() + self.assertEqual(lines[5], replace_str) + finally: + os.remove(copy_file_path) + + def test_get_upcoming_minor_version(self): + assert get_upcoming_minor_version("10.1.0") == "10.2" + + def test_get_last_commit_message(self): + current_branch_name = get_current_branch(os.getcwd()) + test_branch_name = f"test{uuid.uuid4()}" + run(f"git checkout -b {test_branch_name}") + try: + with 
open(test_branch_name,"w") as writer: + writer.write("Test content") + run(f"git add .") + commit_message = f"Test message for {test_branch_name}" + run(f"git commit -m '{commit_message}'") + assert get_last_commit_message(os.getcwd()) == f"{commit_message}\n" + finally: + run(f"git checkout {current_branch_name}") + run(f"git branch -D {test_branch_name}") + + + def test_getprs(self): + # created at is not seen on Github. Should be checked on API result + g = Github(GITHUB_TOKEN) + repository = g.get_repo(f"citusdata/citus") + prs = get_prs_for_patch_release(repository, datetime.strptime('2021.02.26', '%Y.%m.%d'), "master", + datetime.strptime('2021.03.02', '%Y.%m.%d')) + self.assertEqual(1, len(prs)) + self.assertEqual(4751, prs[0].number) + + def test_getprs_with_backlog_label(self): + g = Github(GITHUB_TOKEN) + repository = g.get_repo(f"citusdata/citus") + prs = get_prs_for_patch_release(repository, datetime.strptime('2021.02.20', '%Y.%m.%d'), "master", + datetime.strptime('2021.02.27', '%Y.%m.%d')) + prs_backlog = filter_prs_by_label(prs, "backport") + self.assertEqual(1, len(prs_backlog)) + self.assertEqual(4746, prs_backlog[0].number) + + def test_local_branch_exists(self): + current_branch_name = get_current_branch(os.getcwd()) + branch_name = "develop-local-test" + self.assertTrue(remote_branch_exists("develop", os.getcwd())) + self.assertFalse(remote_branch_exists("develop2", os.getcwd())) + try: + run(f"git checkout -b {branch_name}") + self.assertTrue(local_branch_exists(branch_name, os.getcwd())) + run(f"git checkout {current_branch_name} ") + finally: + run(f"git branch -D {branch_name}") + + self.assertFalse(remote_branch_exists("develop_test", os.getcwd())) + + def test_remote_branch_exists(self): + current_branch_name = get_current_branch(os.getcwd()) + branch_name = "develop-remote-test" + try: + try: + run(f"git branch -D {branch_name}") + except: + print(f"{branch_name} already deleted ") + run(f"git checkout develop") + run(f"git checkout -b 
{branch_name}") + run(f"git push --set-upstream origin {branch_name}") + run(f"git checkout develop") + run(f"git branch -D {branch_name}") + self.assertTrue(remote_branch_exists(branch_name, os.getcwd())) + self.assertFalse(remote_branch_exists(f"{branch_name}{uuid.uuid4()}", os.getcwd())) + finally: + run(f"git checkout {current_branch_name} ") + # run(f"git branch -D {branch_name}") + run(f"git push origin --delete {branch_name}") + + def test_get_minor_version(self): + self.assertEqual("10.0", get_minor_version("10.0.3")) + + def test_get_patch_version_regex(self): + self.assertEqual("10\.0\.\d{1,3}", get_patch_version_regex("10.0.3")) + + def test_append_line_in_file(self): + test_file = "test_append.txt" + try: + with open(test_file, "a") as writer: + writer.write("Test line 1\n") + writer.write("Test line 2\n") + writer.write("Test line 3\n") + writer.write("Test line 4\n") + writer.write("Test line 5\n") + writer.write("Test line 6\n") + writer.write("Test line 7\n") + writer.write("Test line 8\n") + append_line_in_file(test_file, "^Test line 1", "Test line 1.5") + append_line_in_file(test_file, "^Test line 2", "Test line 2.5") + append_line_in_file(test_file, "^Test line 5", "Test line 5.5") + + with open(test_file, "r") as reader: + lines = reader.readlines() + self.assertEqual(11, len(lines)) + self.assertEqual(lines[0], "Test line 1\n") + self.assertEqual(lines[1], "Test line 1.5\n") + self.assertEqual(lines[2], "Test line 2\n") + self.assertEqual(lines[3], "Test line 2.5\n") + finally: + os.remove(test_file) + + def test_prepend_line_in_file(self): + test_file = "test_prepend.txt" + try: + with open(test_file, "a") as writer: + writer.write("Test line 1\n") + writer.write("Test line 2\n") + writer.write("Test line 3\n") + writer.write("Test line 4\n") + writer.write("Test line 5\n") + writer.write("Test line 6\n") + writer.write("Test line 7\n") + writer.write("Test line 8\n") + prepend_line_in_file(test_file, "^Test line 1", "Test line 0.5") + 
prepend_line_in_file(test_file, "^Test line 2", "Test line 1.5") + prepend_line_in_file(test_file, "^Test line 5", "Test line 4.5") + + with open(test_file, "r") as reader: + lines = reader.readlines() + self.assertEqual(11, len(lines)) + self.assertEqual(lines[0], "Test line 0.5\n") + self.assertEqual(lines[1], "Test line 1\n") + self.assertEqual(lines[2], "Test line 1.5\n") + self.assertEqual(lines[3], "Test line 2\n") + finally: + os.remove(test_file) + + +if __name__ == '__main__': + unittest.main() diff --git a/packaging_automation/tests/test_prepare_release.py b/packaging_automation/tests/test_prepare_release.py new file mode 100644 index 00000000..6bc5a968 --- /dev/null +++ b/packaging_automation/tests/test_prepare_release.py @@ -0,0 +1,115 @@ +import os +import uuid +from datetime import datetime + +import pathlib2 + +from ..common_tool_methods import (file_includes_line, count_line_in_file, run, get_last_commit_message, + remove_cloned_code) +from ..prepare_release import (update_release, MULTI_EXTENSION_OUT, MULTI_EXTENSION_SQL, CONFIGURE, + CONFIGURE_IN, CITUS_CONTROL, CONFIG_PY) + +github_token = os.getenv("GH_TOKEN") + +BASE_PATH = pathlib2.Path(__file__).parents[2] if os.getenv("BASE_PATH") is None else os.getenv("BASE_PATH") + +MAIN_BRANCH = "test-tools-scripts" +TEST_CHECKOUT_DIR = "citus_test" + +resources_to_be_deleted = [] + + +def initialize_env() -> str: + test_base_path_major = f"{BASE_PATH}/{uuid.uuid4()}" + remove_cloned_code(test_base_path_major) + if not os.path.exists(test_base_path_major): + run(f"git clone https://github.com/citusdata/citus.git {test_base_path_major}") + return test_base_path_major + + +def test_major_release(): + test_base_path_major = initialize_env() + os.chdir(test_base_path_major) + resources_to_be_deleted.append(test_base_path_major) + + previous_print_extension_changes = count_line_in_file(test_base_path_major, MULTI_EXTENSION_OUT, + "SELECT * FROM print_extension_changes();") + + update_release_return_value = 
update_release( + github_token=github_token, project_name="citus", project_version="10.1.0", main_branch=MAIN_BRANCH, + earliest_pr_date=datetime.strptime('2021.03.25 00:00', '%Y.%m.%d %H:%M'), + exec_path=test_base_path_major, is_test=True) + + run(f"git checkout {update_release_return_value.release_branch_name}") + + assert file_includes_line(test_base_path_major, MULTI_EXTENSION_OUT, " 10.1.0") + assert file_includes_line(test_base_path_major, CONFIGURE_IN, "AC_INIT([Citus], [10.1.0])") + assert file_includes_line(test_base_path_major, CONFIGURE, "PACKAGE_VERSION='10.1.0'") + assert file_includes_line(test_base_path_major, CONFIGURE, "PACKAGE_STRING='Citus 10.1.0'") + assert file_includes_line(test_base_path_major, CONFIGURE, + r"\`configure' configures Citus 10.1.0 to adapt to many kinds of systems.") + assert file_includes_line(test_base_path_major, CONFIGURE, + ' short | recursive ) echo "Configuration of Citus 10.1.0:";;') + assert file_includes_line(test_base_path_major, CONFIGURE, "PACKAGE_VERSION='10.1.0'") + assert get_last_commit_message(test_base_path_major) == "Bump citus version to 10.1.0\n" + + run(f"git checkout {update_release_return_value.upcoming_version_branch}") + + assert file_includes_line(test_base_path_major, CITUS_CONTROL, "default_version = '10.2-1'") + assert file_includes_line(test_base_path_major, MULTI_EXTENSION_OUT, + "-- Test downgrade to 10.1-1 from 10.2-1") + assert file_includes_line(test_base_path_major, MULTI_EXTENSION_OUT, + "ALTER EXTENSION citus UPDATE TO '10.1-1';") + assert count_line_in_file(test_base_path_major, MULTI_EXTENSION_OUT, + "ALTER EXTENSION citus UPDATE TO '10.2-1';") == 2 + assert file_includes_line(test_base_path_major, MULTI_EXTENSION_OUT, + "-- Should be empty result since upgrade+downgrade should be a no-op") + assert count_line_in_file(test_base_path_major, MULTI_EXTENSION_OUT, + "SELECT * FROM print_extension_changes();") - previous_print_extension_changes == 2 + assert 
file_includes_line(test_base_path_major, MULTI_EXTENSION_OUT, + "-- Snapshot of state at 10.2-1") + assert file_includes_line(test_base_path_major, MULTI_EXTENSION_OUT, " 10.2devel") + + assert count_line_in_file(test_base_path_major, MULTI_EXTENSION_SQL, + "ALTER EXTENSION citus UPDATE TO '10.2-1';") == 2 + assert file_includes_line(test_base_path_major, CONFIG_PY, "MASTER_VERSION = '10.2'") + assert file_includes_line(test_base_path_major, CONFIGURE_IN, "AC_INIT([Citus], [10.2devel])") + assert file_includes_line(test_base_path_major, CONFIGURE, "PACKAGE_VERSION='10.2devel'") + assert file_includes_line(test_base_path_major, CONFIGURE, "PACKAGE_STRING='Citus 10.2devel'") + assert file_includes_line(test_base_path_major, CONFIGURE, + r"\`configure' configures Citus 10.2devel to adapt to many kinds of systems.") + assert file_includes_line(test_base_path_major, CONFIGURE, + ' short | recursive ) echo "Configuration of Citus 10.2devel:";;') + assert file_includes_line(test_base_path_major, CONFIGURE, "PACKAGE_VERSION='10.2devel'") + assert os.path.exists(f"{test_base_path_major}/{update_release_return_value.upgrade_path_sql_file}") + assert os.path.exists(f"{test_base_path_major}/{update_release_return_value.downgrade_path_sql_file}") + assert get_last_commit_message(test_base_path_major) == 'Bump citus version to 10.2devel\n' + run(f"git checkout {MAIN_BRANCH}") + + +def test_patch_release(): + test_base_path_patch = initialize_env() + resources_to_be_deleted.append(test_base_path_patch) + os.chdir(test_base_path_patch) + try: + project_version = "10.0.4" + schema_version = "10.1-5" + update_release( + github_token=github_token, project_name="citus", project_version=project_version, + main_branch=MAIN_BRANCH, + earliest_pr_date=datetime.strptime('2021.03.25 00:00', '%Y.%m.%d %H:%M'), + exec_path=test_base_path_patch, is_test=True, schema_version=schema_version) + assert file_includes_line(test_base_path_patch, MULTI_EXTENSION_OUT, f" {project_version}") + assert 
file_includes_line(test_base_path_patch, CONFIGURE_IN, f"AC_INIT([Citus], [{project_version}])") + assert file_includes_line(test_base_path_patch, CONFIGURE, f"PACKAGE_VERSION='{project_version}'") + assert file_includes_line(test_base_path_patch, CONFIGURE, f"PACKAGE_STRING='Citus {project_version}'") + assert file_includes_line(test_base_path_patch, CONFIGURE, + rf"\`configure' configures Citus {project_version} to adapt to many kinds of systems.") + assert file_includes_line(test_base_path_patch, CONFIGURE, + f' short | recursive ) echo "Configuration of Citus {project_version}:";;') + assert file_includes_line(test_base_path_patch, CONFIGURE, f"PACKAGE_VERSION='{project_version}'") + assert file_includes_line(test_base_path_patch, CITUS_CONTROL, f"default_version = '{schema_version}'") + run(f"git checkout {MAIN_BRANCH}") + finally: + for path in resources_to_be_deleted: + run(f"sudo rm -rf {path}") diff --git a/python/tests/test_update_package_properties.py b/packaging_automation/tests/test_update_package_properties.py similarity index 92% rename from python/tests/test_update_package_properties.py rename to packaging_automation/tests/test_update_package_properties.py index d3bdc4cf..d9b5665f 100644 --- a/python/tests/test_update_package_properties.py +++ b/packaging_automation/tests/test_update_package_properties.py @@ -1,9 +1,16 @@ import pytest import os +import re from shutil import copyfile +import pathlib2 +from datetime import datetime from .test_utils import are_strings_equal -from ..update_package_properties import * +from ..update_package_properties import (PackagePropertiesParams, changelog_for_tag, + get_last_changelog_content_from_debian, debian_changelog_header, + prepend_latest_changelog_into_debian_changelog, + convert_citus_changelog_into_rpm_changelog, spec_file_name, update_rpm_spec, + update_pkgvars, update_all_changes) TEST_BASE_PATH = pathlib2.Path(__file__).parent.absolute() BASE_PATH = os.getenv("BASE_PATH", 
default=pathlib2.Path(__file__).parents[1]) @@ -13,8 +20,8 @@ PROJECT_NAME = os.getenv("PROJECT_NAME", default="citus") MICROSOFT_EMAIL = os.getenv("MICROSOFT_EMAIL", default="gindibay@microsoft.com") NAME_SURNAME = os.getenv("NAME_SURNAME", default="Gurkan Indibay") -CHANGELOG_DATE = datetime.strptime('Thu, 18 Mar 2021 01:40:08 +0000', '%a, %d %b %Y %H:%M:%S %z') if os.getenv( - "CHANGELOG_DATE") is None else datetime.strptime(os.getenv("CHANGELOG_DATE"), '%a, %d %b %Y %H:%M:%S %z') +CHANGELOG_DATE_STR = os.getenv("CHANGELOG_DATE", 'Thu, 18 Mar 2021 01:40:08 +0000') +CHANGELOG_DATE = datetime.strptime(CHANGELOG_DATE_STR, '%a, %d %b %Y %H:%M:%S %z') def default_changelog_param_for_test(latest_changelog, changelog_date): @@ -36,10 +43,6 @@ def test_get_version_number_with_project_name(): assert DEFAULT_CHANGELOG_PARAM_FOR_TEST.version_number_with_project_name() == "10.0.3.citus-1" -def test_find_nth_overlapping(): - assert find_nth_overlapping("foofoo foofoo", "foofoo", 2) == 7 - - def test_get_changelog_for_tag(): changelog = changelog_for_tag(GITHUB_TOKEN, "citus", "v10.0.3") with open(f"{TEST_BASE_PATH}/files/verify/expected_changelog_10.0.3.txt", "r") as reader: diff --git a/python/tests/test_utils.py b/packaging_automation/tests/test_utils.py similarity index 100% rename from python/tests/test_utils.py rename to packaging_automation/tests/test_utils.py diff --git a/python/update_package_properties.py b/packaging_automation/update_package_properties.py similarity index 89% rename from python/update_package_properties.py rename to packaging_automation/update_package_properties.py index 15483707..3db607c9 100644 --- a/python/update_package_properties.py +++ b/packaging_automation/update_package_properties.py @@ -1,14 +1,16 @@ import argparse import re -from datetime import date, datetime +from dataclasses import dataclass +from datetime import datetime import pathlib2 import string_utils from github import Github -from jinja2 import Environment, FileSystemLoader 
from parameters_validation import (no_whitespaces, non_blank, non_empty, non_negative, validate_parameters, parameter_validation) -from dataclasses import dataclass + +from .common_tool_methods import (find_nth_matching_line_and_line_number, find_nth_occurrence_position, + get_project_version_from_tag_name, get_template_environment) BASE_PATH = pathlib2.Path(__file__).parent.absolute() @@ -22,7 +24,7 @@ def is_version(version: str): raise ValueError("version should be non-empty and should not be None") if not re.match(r"\d+\.\d+\.\d+$", version): raise ValueError( - "version should include 3 levels of versions consists of numbers separated with dots. e.g: 10.0.1") + "version should include 3 levels giof versions consists of numbers separated with dots. e.g: 10.0.1") @parameter_validation @@ -92,36 +94,8 @@ def spec_file_name(project_name: str) -> str: return f"{project_name}.spec" -def get_template_environment(template_dir: str) -> Environment: - file_loader = FileSystemLoader(template_dir) - env = Environment(loader=file_loader) - return env - - -def find_nth_overlapping(subject_string, search_string, n) -> int: - start = subject_string.find(search_string) - - while start >= 0 and n > 1: - start = subject_string.find(search_string, start + 1) - n -= 1 - return start - - -def find_nth_overlapping_line_by_regex(subject_string, regex_pattern, n) -> int: - lines = subject_string.splitlines() - counter = 0 - index = -1 - for i in range(len(lines)): - if re.match(regex_pattern, lines[i]): - counter = counter + 1 - if counter == n: - index = i - break - return index - - def get_last_changelog_content(all_changelog_content: str) -> str: - second_changelog_index = find_nth_overlapping(all_changelog_content, "###", 3) + second_changelog_index = find_nth_occurrence_position(all_changelog_content, "###", 3) changelogs = all_changelog_content[:second_changelog_index] lines = changelogs.splitlines() if len(lines) < 1: @@ -133,7 +107,8 @@ def 
get_last_changelog_content(all_changelog_content: str) -> str: def get_last_changelog_content_from_debian(all_changelog_content: str) -> str: - second_changelog_index = find_nth_overlapping_line_by_regex(all_changelog_content, "^[a-zA-Z]", 2) + second_changelog_index, second_changelog_line = find_nth_matching_line_and_line_number(all_changelog_content, + "^[a-zA-Z]", 2) lines = all_changelog_content.splitlines() changelogs = "\n".join(lines[:second_changelog_index - 1]) + "\n" if len(lines) < 1: @@ -141,7 +116,7 @@ def get_last_changelog_content_from_debian(all_changelog_content: str) -> str: return changelogs -def remove_parentheses_from_string(param: str) -> str: +def remove_paranthesis_from_string(param: str) -> str: return re.sub(r"[(\[].*?[)\]]", "", param) @@ -153,15 +128,15 @@ def changelog_for_tag(github_token: str, project_name: str, tag_name: str) -> st return last_changelog_content -# truncates # chars , get the version an put parentheses around version number adds 'stable; urgency=low' at the end +# truncates # chars , get the version an put paranthesis around version number adds 'stable; urgency=low' at the end # changelog_header=> ### citus v8.3.3 (March 23, 2021) ### # debian header => citus (10.0.3.citus-1) stable; urgency=low @validate_parameters def debian_changelog_header(changelog_header: is_project_changelog_header(str), fancy: bool, fancy_version_number: int) -> str: hash_removed_string = changelog_header.lstrip("### ").rstrip(" ###") - parentheses_removed_string = remove_parentheses_from_string(hash_removed_string) - words = parentheses_removed_string.strip().split(" ") + paranthesis_removed_string = remove_paranthesis_from_string(hash_removed_string) + words = paranthesis_removed_string.strip().split(" ") if len(words) != 2: raise ValueError("Two words should be included in striped version header") project_name = words[0] @@ -291,7 +266,6 @@ def update_all_changes(github_token: non_empty(non_blank(str)), package_properti parser = 
argparse.ArgumentParser() parser.add_argument('--gh_token') parser.add_argument('--prj_name') - parser.add_argument('--prj_ver') parser.add_argument('--tag_name') parser.add_argument('--fancy') parser.add_argument('--fancy_ver_no') @@ -305,9 +279,11 @@ def update_all_changes(github_token: non_empty(non_blank(str)), package_properti raise ValueError(f"fancy_ver_no is expected to be numeric actual value {arguments.fancy_ver_no}") exec_date = datetime.strptime(arguments.date, '%Y.%m.%d %H:%M:%S %z') + is_tag(arguments.tag_name) + prj_ver = get_project_version_from_tag_name(arguments.tag_name) package_properties = PackagePropertiesParams(project_name=arguments.prj_name, - project_version=arguments.prj_ver, fancy=arguments.fancy, + project_version=prj_ver, fancy=arguments.fancy, fancy_version_number=int(arguments.fancy_ver_no), name_surname=arguments.name, microsoft_email=arguments.email, changelog_date=exec_date) diff --git a/python/upload_to_package_cloud.py b/packaging_automation/upload_to_package_cloud.py similarity index 100% rename from python/upload_to_package_cloud.py rename to packaging_automation/upload_to_package_cloud.py diff --git a/python/requirements.txt b/python/requirements.txt deleted file mode 100644 index c4ef1faf..00000000 --- a/python/requirements.txt +++ /dev/null @@ -1,80 +0,0 @@ -adal==1.2.2 -appdirs==1.4.3 -attrs==19.3.0 -Automat==0.8.0 -blinker==1.4 -certifi==2019.11.28 -chardet==3.0.4 -Click==7.0 -colorama==0.4.3 -configobj==5.0.6 -constantly==15.1.0 -cryptography==2.8 -dbus-next==0.2.2 -Deprecated==1.2.12 -distlib==0.3.0 -distro==1.4.0 -entrypoints==0.3 -filelock==3.0.12 -httplib2==0.14.0 -hyperlink==19.0.0 -idna==2.8 -importlib-metadata==1.5.0 -incremental==16.10.1 -iniconfig==1.1.1 -Jinja2==2.10.1 -jsonpatch==1.22 -jsonpointer==2.0 -jsonschema==3.2.0 -keyring==18.0.1 -launchpadlib==1.10.13 -lazr.restfulclient==0.14.2 -lazr.uri==1.0.3 -MarkupSafe==1.1.0 -more-itertools==4.2.0 -netifaces==0.10.4 -oauthlib==3.1.0 -packaging==20.9 
-parameters-validation==1.2.0 -pathlib2==2.3.5 -pbr==5.5.1 -pexpect==4.6.0 -pipenv==2020.11.15 -pluggy==0.13.1 -py==1.10.0 -pyasn1==0.4.2 -pyasn1-modules==0.2.1 -pycairo==1.20.0 -pycurl==7.43.0.6 -PyGithub==1.54.1 -Pygments==2.3.1 -PyHamcrest==1.9.0 -PyJWT==1.7.1 -pymacaroons==0.13.0 -PyNaCl==1.3.0 -pyOpenSSL==19.0.0 -pyparsing==2.4.7 -pyrsistent==0.15.5 -pyserial==3.4 -pytest==6.2.3 -python-dateutil==2.7.3 -python-debian===0.1.36 -python-string-utils==1.0.0 -PyYAML==5.3.1 -requests==2.22.0 -requests-unixsocket==0.2.0 -SecretStorage==2.3.1 -service-identity==18.1.0 -simplejson==3.16.0 -six==1.14.0 -ssh-import-id==5.10 -systemd-python==234 -testresources==2.0.1 -toml==0.10.2 -Twisted==18.9.0 -urllib3==1.25.8 -virtualenv==20.0.17 -wadllib==1.3.3 -wrapt==1.12.1 -zipp==1.0.0 -zope.interface==4.7.1