diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index c20656c8..ab9763d1 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -1,6 +1,6 @@ name: Pull Request run-name: | - PR #${{ github.event.pull_request.number }} [${{ github.event_name == 'pull_request' && 'updated' || 'reviewed' }}, ${{ github.event.action }}${{ github.event.pull_request.draft && ', draft' || '' }}] ${{github.event.pull_request.title}} + PR #${{ github.event.pull_request.number }} [${{ github.event_name == 'pull_request' && 'updated' || 'reviewed' }}, ${{ github.event_name == 'pull_request' && github.event.action || github.event.review.state }}${{ github.event.pull_request.draft && ', draft' || '' }}] ${{github.event.pull_request.title}} on: pull_request: @@ -110,27 +110,13 @@ jobs: ;; esac - # Fail if no test is enabled, to report a failed check and block the PR from automerging. - # It is ok to generate failed run, since they will be cleaned up once the PR is closed. - if [ -z "${basic}${full}${test_not_needed}" ]; then - printf -- "ERROR: PR #${{ github.event.pull_request.number }} is not ready for testing yet.\n" >&2 - check_failed=y - elif [ -n "${test_not_needed}" ]; then - printf -- "PR #${{ github.event.pull_request.number }} doesn't need further testing.\n" >&2 - fi - ( - echo CHECK_FAILED=${check_failed} + # echo CHECK_FAILED=${check_failed} echo VALIDATE_BASIC=${basic} echo VALIDATE_FULL=${full} echo VALIDATE_DEB=${full} ) >> ${GITHUB_OUTPUT} - - name: "Fail job if check failed" - run: | - exit 1 - if: steps.config.outputs.CHECK_FAILED - basic-validation: needs: check-trigger if: ${{ needs.check-trigger.outputs.VALIDATE_BASIC }} @@ -187,21 +173,3 @@ jobs: base-tag: ${{ matrix.base-tag }} platform: ${{ matrix.platform }} pull-request: true - - enable-merge: - needs: - - check-trigger - - full-validation - - deb-validation - uses: ./.github/workflows/pull_request_check_approval.yml - if: ${{ needs.check-trigger.outputs.VALIDATE_FULL && !needs.check-trigger.outputs.VALIDATE_BASIC }} - - enable-merge-at-once: - needs: - - check-trigger - - full-validation - - deb-validation - - basic-validation - uses: ./.github/workflows/pull_request_check_approval.yml - if: ${{ needs.check-trigger.outputs.VALIDATE_FULL && needs.check-trigger.outputs.VALIDATE_BASIC }} - diff --git a/.github/workflows/pull_request_check_approval.yml b/.github/workflows/pull_request_check_approval.yml deleted file mode 100644 index d114d6a2..00000000 --- a/.github/workflows/pull_request_check_approval.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Pull Request (Check Approval) -run-name: | - PR #${{ github.event.pull_request.number }} [${{ github.event_name == 'pull_request' && 'check approval on PR update' || 'fully validated' }}] - -on: - pull_request: - branches: - - master - paths-ignore: - # No sense in doing these tests for these file - - 'README.md' - - 'docs/**/*' - types: - - opened - - synchronize - - ready_for_review - - workflow_call: - -concurrency: - group: pr-test-automerge-${{ github.ref }} - cancel-in-progress: true - -permissions: - packages: read - contents: read - -jobs: - check-trigger: - runs-on: ubuntu-latest - steps: - - name: Clone uno - uses: actions/checkout@v4 - with: - path: src/uno - submodules: true - - - name: "Check if PR is ready for merging" - id: config - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - case "${{ github.event_name }}" in - pull_request) - if [ "${{ github.event.pull_request.draft }}" = true ]; then - # Draft PRs can't 
ever be merged - check_failed=y - else - case "${{ github.event.action }}" in - opened) - # just opened PR can't have possibly been reviewed already, don't merge - check_failed=y - ;; - synchronize) - # A commit was pushed to the PR, the PR can't be approved yet - check_failed=y - ;; - ready_for_review) - # PR taken out of draft, fail only if not "approved". - # Ideally: we would just query the review state, but that doesn't seem to - # be available on the pull_request object, - # see: https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request) - # So we use the GitHub API to query the state, - # see: https://stackoverflow.com/a/77647838 - (cd src/uno; gh repo set-default ${{ github.repository }}) - review_state=$(cd src/uno; gh pr view 11 --json reviewDecision --jq '.reviewDecision') - case "${review_state}" in - APPROVED) - ;; - *) - check_failed=y - ;; - esac - ;; - esac - fi - ;; - pull_request_review) - case "${{ github.event.review.state }}" in - approved) - ;; - *) - check_failed=y - ;; - esac - ;; - workflow_call) - # When call by another workflow assume it means the PR is approved - ;; - esac - ( - echo CHECK_FAILED=${check_failed} - ) >> "${GITHUB_OUTPUT}" - - - - name: "Fail if PR is not ready yet" - run: | - exit 1 - if: steps.config.outputs.CHECK_FAILED diff --git a/.github/workflows/pull_request_closed.yml b/.github/workflows/pull_request_closed.yml index d2cc6549..bf052254 100644 --- a/.github/workflows/pull_request_closed.yml +++ b/.github/workflows/pull_request_closed.yml @@ -45,10 +45,15 @@ jobs: - name: "Clean up workflow runs" run: | - src/uno/scripts/cleanup_closed_pull_request.sh \ - ${{ github.repository }} \ - ${{ github.event_name == 'pull_request' && github.event.pull_request.number || inputs.pr-number }} \ - ${{ github.event_name == 'pull_request' && github.event.pull_request.merged || inputs.pr-merged }} + docker run --rm \ + -v $(pwd):/workspace \ + -e GH_TOKEN=${GH_TOKEN} \ + -w /workspace \ + ${ADMIN_IMAGE} \ + src/uno/scripts/ci-admin pr-closed \ + -r ${{ github.repository }} \ + -N ${{ github.event_name == 'pull_request' && github.event.pull_request.number || inputs.pr-number }} \ + ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged || inputs.pr-merged) && '-m' || '' }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} ADMIN_IMAGE: ghcr.io/mentalsmash/uno-ci-admin:latest diff --git a/Makefile b/Makefile index dc7dc2b7..e84593c1 100644 --- a/Makefile +++ b/Makefile @@ -163,7 +163,7 @@ changelog: -v $(UNO_DIR)/:/uno \ -w /uno \ $(DEB_BUILDER) \ - /uno/scripts/bundle/update_changelog.sh + /uno/scripts/debian/update_changelog.sh # Build uno's debian packages. # Requires the Debian Builder image. @@ -172,7 +172,7 @@ debuild: -v $(UNO_DIR)/:/uno \ -w /uno \ $(DEB_BUILDER) \ - /uno/scripts/debian_build.sh + /uno/scripts/debian/build.sh # Run integration tests using the debian package. 
# Requires the Debian Tester image diff --git a/scripts/bundle/pyinstaller.sh b/scripts/bundle/pyinstaller.sh index bb8a8a70..acd72a30 100755 --- a/scripts/bundle/pyinstaller.sh +++ b/scripts/bundle/pyinstaller.sh @@ -9,7 +9,7 @@ FLAVOR=${1:-default} : "${DIST_DIR:=$(pwd)/dist/bundle/${FLAVOR}}" [ -n "${DIST_DIR}" ] -: "${BUILD_DIR:=$(pwd)/build/pyinstaller}" +: "${BUILD_DIR:=$(pwd)/build/bundle}" [ -n "${BUILD_DIR}" ] ( @@ -17,17 +17,18 @@ FLAVOR=${1:-default} rm -rf ${BUILD_DIR} ) -: "${SCRIPTS:=\ - ./scripts/bundle/uno - ./uno/test/integration/runner.py}" -[ -n "${SCRIPTS}" ] - : "${VENV_PYINST:=${BUILD_DIR}/venv-pyinst}" [ -n "${VENV_PYINST}" ] : "${VENV_UNO:=${BUILD_DIR}/venv-uno}" [ -n "${VENV_UNO}" ] + +: "${SCRIPTS:=\ + ${VENV_UNO}/bin/uno + ./uno/test/integration/runner.py}" +[ -n "${SCRIPTS}" ] + if [ ! -d "${VENV_PYINST}" ]; then ( set -x diff --git a/scripts/bundle/uno b/scripts/bundle/uno deleted file mode 100644 index c10a7c37..00000000 --- a/scripts/bundle/uno +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python3 -from uno.cli.uno import main -main() diff --git a/scripts/ci-admin b/scripts/ci-admin new file mode 100755 index 00000000..905b4cbc --- /dev/null +++ b/scripts/ci-admin @@ -0,0 +1,905 @@ +#!/usr/bin/env python3 +############################################################################### +# Copyright 2020-2024 Andrea Sorbini +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +############################################################################### +# Example usage: +############################################################################### +# +# - list workflow runs for a repository: +# +# ci-admin select-runs -r mentalsmash/uno [-f "<filter>"] +# +# - delete workflow runs for a repository: +# +# ci-admin delete-runs -r mentalsmash/uno [-f "<filter>"] +# +# - list workflow runs for a PR: +# +# ci-admin pr-runs -r mentalsmash/uno -N <pr-number> +# +# - clean up workflow runs after a PR is closed: +# +# ci-admin pr-closed -r mentalsmash/uno -N <pr-number> [--merged] +# +# - list packages for an organization (only containers supported for now): +# +# ci-admin select-packages -o mentalsmash [-f "<filter>"] +# +# - list versions for a package (owned by an organization): +# +# ci-admin select-versions -p uno -o mentalsmash [-f "<filter>"] +# +# - delete versions for a package (owned by an organization): +# +# ci-admin delete-versions -p uno -o mentalsmash [-f "<filter>"] +# +############################################################################### +# Common arguments: +############################################################################### +# --interactive: dynamically select entries (requires a tty) +# --noop: don't actually perform changes +############################################################################### +# Common arguments must be specified before the action +# (e.g. 
"ci-admin --interactive select-runs ...") +############################################################################### +import json +import os +import re +import sys +import subprocess +import argparse +import tempfile +import contextlib +import traceback +from functools import partial +from pathlib import Path +from typing import Generator, NamedTuple, TextIO, Protocol +from datetime import datetime + +ScriptsDir = Path(__file__).parent +ScriptNoninteractiveRequired = not sys.stdin.isatty() or not sys.stdout.isatty() +ScriptNoninteractive = True + +# GitHub API documentation: https://docs.github.com/en/rest/reference/packages +GitHubApiAccept = "application/vnd.github.v3+json" +# https://docs.github.com/en/rest/overview/api-versions?apiVersion=2022-11-28 +GitHubApiVersion = "2022-11-28" + +############################################################################### +# Global hooks to produce output to stdout, and possibly tabulate it +############################################################################### +TabulateEnabled = True +TabulateOutput = None +TabulateColumns = [] + + +def tabulate_columns(*columns: list[str]) -> None: + global TabulateColumns + TabulateColumns.clear() + TabulateColumns.extend(columns) + + +def output(*fields): + global TabulateOutput + global TabulateEnabled + if TabulateEnabled and TabulateOutput is None: + try: + TabulateOutput = subprocess.Popen( + ["column", "-t", "-s", "\t"], stdin=subprocess.PIPE, stderr=subprocess.PIPE + ) + if TabulateColumns: + columns = "\t".join(col.upper().replace("_", " ") for col in TabulateColumns) + TabulateOutput.stdin.write(columns.encode()) + TabulateOutput.stdin.write("\n".encode()) + except Exception: + # The process failed, assume column is not available + # and don't try to tabulate again + TabulateEnabled = False + + line = "\t".join(fields).strip() + if not TabulateOutput: + print(line) + else: + TabulateOutput.stdin.write(line.encode()) + TabulateOutput.stdin.write("\n".encode()) + + +############################################################################### +# mini logger API +############################################################################### +ColorEnabled = sys.stderr.isatty() and not os.environ.get("NO_COLORS", False) +try: + import termcolor +except Exception: + ColorEnabled = False + + +def _log_msg(lvl, fmt, *args, **print_args) -> None: + print_args.setdefault("file", sys.stderr) + line = fmt.format(*args) if args else fmt + line = f"[{lvl}]" + ("" if line.startswith("[") else " ") + line + if ColorEnabled: + color = { + "D": "magenta", + "A": "cyan", + "I": "green", + "W": "yellow", + "E": "red", + }[lvl] + line = termcolor.colored(line, color) + print(line, **print_args) + + +def _log_debug(*args, **print_args) -> None: + return _log_msg("D", *args, **print_args) + + +def _log_activity(*args, **print_args) -> None: + return _log_msg("A", *args, **print_args) + + +def _log_info(*args, **print_args) -> None: + return _log_msg("I", *args, **print_args) + + +def _log_error(*args, **print_args) -> None: + return _log_msg("E", *args, **print_args) + + +def _log_warning(*args, **print_args) -> None: + return _log_msg("W", *args, **print_args) + + +def _log_command(cmd: list[str], shell: bool = False, check: bool = False, **print_args) -> None: + import shlex + + if shell: + cmd = ["sh", f"-{'e' if check else ''}c", *cmd] + _log_debug("+ " + " ".join(["{}"] * len(cmd)), *(map(shlex.quote, cmd)), **print_args) + + +class LogFunction(Protocol): + def __call__(self, *args, **print_args) -> 
None: + pass + + +class LogCommandFunction(Protocol): + def __call__( + self, cmd: list[str], shell: bool = False, check: bool = False, **print_args + ) -> None: + pass + + +class Logger(NamedTuple): + debug: LogFunction + activity: LogFunction + info: LogFunction + warning: LogFunction + error: LogFunction + command: LogCommandFunction + + +log = Logger(_log_debug, _log_activity, _log_info, _log_warning, _log_error, _log_command) + + +############################################################################### +# Parse/print dates in the format used by the GH API +############################################################################### +def github_date_parse(date: str) -> datetime: + return datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") + + +def github_date_str(date: datetime) -> str: + return date.strftime("%Y-%m-%dT%H:%M:%SZ") + + +############################################################################### +# Mixin for data objects +############################################################################### +class DataObject: + Parsers = {} + + @classmethod + def build(cls, obj_cls: type[NamedTuple], *args) -> NamedTuple: + cls.data_object(obj_cls) + args = list(args) + for i in obj_cls.DatetimeFields: + if not isinstance(args[i], datetime): + args[i] = github_date_parse(args[i]) + return obj_cls(*args) + + @classmethod + def parse(cls, obj_cls: type[NamedTuple], package_line: str) -> object | None: + cls.data_object(obj_cls) + try: + parse_re = cls.Parsers[obj_cls] + fields = parse_re.findall(package_line)[0] + return cls.build(obj_cls, *fields) + except Exception: + log.error("failed to parse {}: '{}'", obj_cls.__qualname__, package_line) + traceback.print_exc() + return None + + @classmethod + def str(cls, obj: NamedTuple) -> str: + fields = list(obj) + for i in obj.DatetimeFields: + fields[i] = github_date_str(fields[i]) + return "\t".join(map(str, fields)) + + @classmethod + def parse_re(cls, obj_cls: type[NamedTuple]) -> re.Pattern: + # Parse a string of fields separated by tabs. 
+ assert len(obj_cls._fields) >= 1 + return re.compile( + "".join(("^", *(r"([^\t]+)[\t]+" for i in range(len(obj_cls._fields) - 1)), r"(.*)", "$")) + ) + + @classmethod + def data_object(cls, obj_cls: type[NamedTuple]) -> None: + if obj_cls not in cls.Parsers: + cls.Parsers[obj_cls] = cls.parse_re(obj_cls) + + +def parse(obj_cls: type[NamedTuple], package_line) -> NamedTuple: + return DataObject.parse(obj_cls, package_line) + + +def build(obj_cls: type[NamedTuple], *args) -> object | None: + return DataObject.build(obj_cls, *args) + + +############################################################################### +# GitHub Workflow Run data object (parsed from query result) +############################################################################### +class WorkflowRun(NamedTuple): + outcome: str + date: datetime + id: int + event: str + name: str + + DatetimeFields = [1] + + def __str__(self) -> str: + return DataObject.str(self) + + +############################################################################### +# GitHub Package data object (parsed from query result) +############################################################################### +class Package(NamedTuple): + id: str + name: str + visibility: str + repository: str + created_at: datetime + updated_at: datetime + + DatetimeFields = [4, 5] + + def __str__(self) -> str: + return DataObject.str(self) + + +############################################################################### +# GitHub PackageVersion data object (parsed from query result) +############################################################################### +class PackageVersion(NamedTuple): + id: str + name: str + tags: tuple[str] + created_at: datetime + updated_at: datetime + + DatetimeFields = [3, 4] + + def __str__(self) -> str: + return DataObject.str(self) + + +############################################################################### +# Perform cleanup procedures on a closed Pull Request +############################################################################### +def pr_closed( + repo: str, pr_no: int, merged: bool, noop: bool = False +) -> list[tuple[bool, WorkflowRun]]: + def _result( + removed: list[WorkflowRun], preserved: list[WorkflowRun] + ) -> list[tuple[bool, WorkflowRun]]: + result = [*((True, run) for run in removed), *((False, run) for run in preserved)] + return sorted(result, key=lambda v: v[1].date) + + all_runs = pr_runs(repo, pr_no, noninteractive=True) + + if not all_runs: + log.warning("[{}][PR #{}] PR was closed without any workflow run", repo, pr_no) + return [] + + log.info("[{}][PR #{}] {} runs detected", repo, pr_no, len(all_runs)) + for i, run in enumerate(all_runs): + log.info(" {}. 
{}", i + 1, run) + + if not merged: + log.warning("[{}][PR #{}] deleting all {} runs for unmerged PR", pr_no, repo, len(all_runs)) + removed = delete_workflow_runs(repo, noop=noop, runs=all_runs) + preserved = [run for run in all_runs if run not in removed] + return _result(removed, preserved) + + log.activity("[{}][PR #{}] listing failed and skipped runs", repo, pr_no) + removed = list(pr_runs(repo, pr_no, "FAIL | ^SKIP | ^NULL", runs=all_runs, noninteractive=True)) + if not removed: + log.info("[{}][PR #{}] no failed nor skipped runs", repo, pr_no) + else: + log.info("[{}][PR #{}] {} failed or skipped runs", repo, pr_no, len(removed)) + + preserved = [] + + log.activity("[{}][PR #{}] listing good 'basic validation' runs", repo, pr_no) + basic_validation_runs = list( + pr_runs(repo, pr_no, "GOOD", "updated", runs=all_runs, noninteractive=True) + ) + if not basic_validation_runs: + log.warning("[{}][PR #{}] no good 'basic validation' run", repo, pr_no) + else: + basic_validation_delete = basic_validation_runs[:-1] + log.info( + "[{}][PR #{}] {} good 'basic validation' runs to delete", + repo, + pr_no, + len(basic_validation_delete), + ) + for i, run in enumerate(basic_validation_delete): + log.info(" {}. {}", i, run) + removed.extend(basic_validation_delete) + basic_validation_run = basic_validation_runs[-1] + log.info("[{}][PR #{}] 'basic validation' run: {}", repo, pr_no, basic_validation_run) + preserved.append(basic_validation_run) + + log.activity("[{}][PR #{}] listing good 'full validation' runs", repo, pr_no) + full_validation_runs = list( + pr_runs(repo, pr_no, "GOOD", "reviewed, approved", runs=all_runs, noninteractive=True) + ) + if not full_validation_runs: + log.error("[{}][PR #{}] no good 'full validation' run!", repo, pr_no) + raise RuntimeError(f"no good 'full validation' run for PR #{pr_no} of {repo}") + else: + full_validation_delete = full_validation_runs[:-1] + log.info( + "[{}][PR #{}] {} good 'full validation' runs to delete", + repo, + pr_no, + len(full_validation_delete), + ) + for i, run in enumerate(full_validation_delete): + log.info(" {}. 
{}", i, run) + removed.extend(full_validation_delete) + full_validation_run = full_validation_runs[-1] + log.info("[{}][PR #{}] 'full validation' run: {}", repo, pr_no, full_validation_run) + preserved.append(full_validation_run) + + if preserved: + log.info("[{}][PR #{}] {} candidates for ARCHIVAL", repo, pr_no, len(preserved)) + if not ScriptNoninteractive: + removed.extend(select_workflow_runs(repo, runs=preserved, prompt="don't archive")) + else: + log.warning("[{}][PR #{}] no runs selected for ARCHIVAL", repo, pr_no) + + if removed: + log.info("[{}][PR #{}] {} candidates for DELETION", repo, pr_no, len(removed)) + actually_removed = delete_workflow_runs(repo, noop=noop, runs=removed) + else: + actually_removed = [] + log.info("[{}][PR #{}] no runs selected for DELETION", repo, pr_no) + + preserved.extend(run for run in removed if run not in actually_removed) + + if not actually_removed: + log.info("[{}][PR #{}] no runs deleted", repo, pr_no) + else: + log.warning("[{}][PR #{}] {} runs DELETED", repo, pr_no, len(actually_removed)) + + if not preserved: + log.warning("[{}][PR #{}] no runs archived", repo, pr_no) + else: + log.warning("[{}][PR #{}] {} runs ARCHIVED", repo, pr_no, len(preserved)) + + return _result(actually_removed, preserved) + + +############################################################################### +# Perform cleanup procedures after on a closed Pull Request +############################################################################### +def pr_runs( + repo: str, pr_no: int, result: str | None = None, category: str | None = None, **select_args +) -> Generator[WorkflowRun, None, None]: + filter = ( + f"{'^'+result+' ' if result else ''}'PR '#{pr_no} '[{category if category is not None else ''}" + ) + select_args.setdefault("prompt", f"runs for PR #{pr_no}") + return select_workflow_runs(repo, filter, **select_args) + + +############################################################################### +# Query the list of workflow runs from a repository. +# If no filter is specified, present the user with `fzf` to select targets. +# Otherwise, run in unattended mode with the provided filter. +# By default, the function will query GitHub and parse the result with jq. +# Optionally, the list of runs can be read from a pregenerated file (or stdin), +# or it can be passed explicitly with the `runs` parameter. +############################################################################### +def select_workflow_runs( + repo: str, + filter: str | None = None, + input: str | None = None, + runs: list[str] | None = None, + prompt: str | None = None, + noninteractive: bool = False, +) -> list[WorkflowRun]: + @contextlib.contextmanager + def _jqscript() -> Generator[Path, None, None]: + script = """\ +def symbol: + sub(""; "")? 
// "NULL" | + sub("skipped"; "SKIP") | + sub("success"; "GOOD") | + sub("startup_failure"; "FAIL") | + sub("cancelled"; "FAIL") | + sub("failure"; "FAIL"); + +[ .workflow_runs[] + | [ + (.conclusion | symbol), + .created_at, + .id, + .event, + .name + ] + ] +""" + + tmp_h = tempfile.NamedTemporaryFile() + script_file = Path(tmp_h.name) + script_file.write_text(script) + yield script_file + + def _read_and_parse_runs(input_stream: TextIO) -> list[WorkflowRun]: + return [ + run + for line in input_stream.readlines() + for sline in [line.decode().strip()] + if sline + for run in [parse(WorkflowRun, sline)] + if run + ] + + if runs: + target_runs = runs + elif input == "-": + target_runs = _read_and_parse_runs(sys.stdin) + elif input: + input_file = Path(input) + with input_file.open("r") as istream: + target_runs = _read_and_parse_runs(istream) + else: + with _jqscript() as jqscript: + query_cmd = [f"gh api --paginate /repos/{repo}/actions/runs" " | " f"jq -r -f {jqscript}"] + log.command(query_cmd, shell=True, check=True) + result = subprocess.run(query_cmd, shell=True, check=True, stdout=subprocess.PIPE) + target_runs = [] + if result.stdout: + run_entries = json.loads(result.stdout.decode()) + target_runs.extend(DataObject.build(WorkflowRun, *entry) for entry in run_entries) + if prompt is None: + prompt = "available runs" + sorted_runs = partial(sorted, key=lambda r: r.date) + fzf = fzf_filter( + filter=filter, inputs=sorted_runs(target_runs), prompt=prompt, noninteractive=noninteractive + ) + return sorted_runs(_read_and_parse_runs(fzf.stdout)) + + +############################################################################### +# Filter a list using fzf +############################################################################### +def fzf_filter( + filter: str | None = None, + inputs: list | None = None, + keep_stdin_open: bool = False, + prompt: str | None = None, + noninteractive: bool = False, +) -> subprocess.Popen: + noninteractive = noninteractive or ScriptNoninteractive + if noninteractive: + filter_arg = "--filter" + else: + filter_arg = "--query" + + if filter is None: + filter = "" + + if prompt is None: + prompt = "" + # if prompt[-2:] != "> ": + prompt += " (TAB: select, ESC: none)> " + + fzf_cmd = ["fzf", "-0", "--tac", "--no-sort", "--multi", "--prompt", prompt, filter_arg, filter] + log.command(fzf_cmd) + fzf = subprocess.Popen(fzf_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE) + if inputs: + for run in inputs: + line = str(run).strip() + fzf.stdin.write(line.encode()) + fzf.stdin.write("\n".encode()) + if not keep_stdin_open: + fzf.stdin.close() + return fzf + + +############################################################################### +# Delete all (or a filtered subset) of the workflow runs from a repository, +############################################################################### +def delete_workflow_runs( + repo: str, + filter: str | None = None, + noop: bool = False, + input: str | None = None, + runs: list[str] | None = None, + prompt: str | None = None, +) -> list[WorkflowRun]: + def _delete_run(run: WorkflowRun): + delete_cmd = ["gh", "api", "-X", "DELETE", f"/repos/{repo}/actions/runs/{run.id}"] + log.command(delete_cmd, check=True) + subprocess.run(delete_cmd, check=True) + + deleted = [] + if prompt is None: + prompt = "runs to delete" + for run in select_workflow_runs(repo, filter, input, runs, prompt=prompt): + if not noop: + _delete_run(run) + deleted.append(run) + if noop: + log.warning("[{}] {} runs selected but not actually 
deleted", repo, len(deleted)) + else: + log.warning("[{}] {} runs DELETED", repo, len(deleted)) + return deleted + + +############################################################################### +# List available packages for the current user or an organization +############################################################################### +def _gh_api(url: str, jq_filter: str, default: object = None) -> dict | list | None: + cmd = [ + "gh api " + f"-H 'Accept: {GitHubApiAccept}' " + f"-H 'X-GitHub-Api-Version: {GitHubApiVersion}' " + f"{url} | " + f"jq '{jq_filter}'" + ] + log.command(cmd, shell=True, check=True) + result = subprocess.run(cmd, shell=True, check=True, stdout=subprocess.PIPE) + if not result.stdout: + return default + return json.loads(result.stdout.decode()) + + +def select_packages( + org: str | None = None, + filter: str | None = None, + package_type: str = "container", + prompt: str | None = None, + noninteractive: bool = False, +) -> list[Package]: + def _ls_packages() -> Generator[Package, None, None]: + jq_filter = ( + "[ (.[] | [.id, .name, .visibility, .repository.full_name , .created_at, .updated_at]) ]" + ) + url = ( + f"/orgs/{org}/packages?package_type={package_type}" + if org + else "/user/packages?package_type={package_type}" + ) + log.activity("listing packages for {}", org if org else "current user") + packages = _gh_api(url, jq_filter, default=[]) + for pkg_entry in packages: + pkg = build(Package, *pkg_entry) + yield pkg + + def _read_and_parse_package(input_stream: TextIO) -> list[Package]: + return [ + pkg + for line in input_stream.readlines() + for sline in [line.decode().strip()] + if sline + for pkg in [parse(Package, sline)] + if pkg + ] + + packages = list(_ls_packages()) + if prompt is None: + prompt = "available packages" + sort_packages = partial(sorted, key=lambda p: p.updated_at) + fzf = fzf_filter( + filter=filter, inputs=sort_packages(packages), prompt=prompt, noninteractive=noninteractive + ) + return sort_packages(_read_and_parse_package(fzf.stdout)) + + +############################################################################### +# List package versions +############################################################################### +def select_package_versions( + package: str, + org: str | None = None, + filter: str | None = None, + package_type: str = "container", + prompt: str | None = None, + noninteractive: bool = False, +) -> list[str]: + def _ls_versions() -> Generator[PackageVersion, None, None]: + jq_filter = "[ (.[] | [.id, .name, .metadata.container.tags, .created_at, .updated_at]) ]" + url = ( + f"/orgs/{org}/packages/{package_type}/{package}/versions" + if org + else f"/user/packages/{package_type}/{package}/versions" + ) + versions = _gh_api(url, jq_filter, default=[]) + for version_entry in versions: + version = build(PackageVersion, *version_entry) + yield version + + def _read_and_parse_versions(input_stream: TextIO) -> list[PackageVersion]: + return [ + pkg + for line in input_stream.readlines() + for sline in [line.decode().strip()] + if sline + for pkg in [parse(PackageVersion, sline)] + if pkg + ] + + versions = list(_ls_versions()) + if prompt is None: + prompt = f"available versions for {package}" + sort_versions = partial(sorted, key=lambda p: p.updated_at) + fzf = fzf_filter( + filter=filter, inputs=sort_versions(versions), prompt=prompt, noninteractive=noninteractive + ) + return sort_versions(_read_and_parse_versions(fzf.stdout)) + + 
+ +############################################################################### +# Delete package versions +############################################################################### +def delete_package_versions( + package: str, + org: str | None = None, + filter: str | None = None, + package_type: str = "container", + prompt: str | None = None, + noninteractive: bool = False, + noop: bool = False, +) -> list[PackageVersion]: + def _delete_version(version: PackageVersion): + url = ( + f"/orgs/{org}/packages/{package_type}/{package}/versions/{version.id}" + if org + else f"/user/packages/{package_type}/{package}/versions/{version.id}" + ) + delete_cmd = ["gh", "api", "-X", "DELETE", url] + log.command(delete_cmd, check=True) + subprocess.run(delete_cmd, check=True) + + deleted = [] + if prompt is None: + prompt = "versions to delete" + for version in select_package_versions( + package, org, filter, package_type, prompt, noninteractive + ): + if not noop: + _delete_version(version) + deleted.append(version) + package_label = package if not org else f"{org}/{package}" + if noop: + log.warning("[{}] {} versions selected but not actually deleted", package_label, len(deleted)) + else: + log.warning("[{}] {} versions DELETED", package_label, len(deleted)) + return deleted + + +############################################################################### +# Command-line arguments parser +############################################################################### +def define_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser("ci-admin") + parser.set_defaults(action=None) + + parser.add_argument( + "-n", "--noop", help="Don't make any changes if possible.", default=False, action="store_true" + ) + + parser.add_argument( + "-R", "--raw", help="Don't process output (e.g. tabulate)", default=False, action="store_true" + ) + + parser.add_argument( + "-i", "--interactive", help="Run in interactive mode.", default=False, action="store_true" + ) + + subparsers = parser.add_subparsers(dest="action") + + parser_pr_closed = subparsers.add_parser( + "pr-closed", help="Clean up workflow runs for a closed PR." + ) + parser_pr_closed.add_argument( + "-r", "--repository", help="Target GitHub repository (owner/repo).", required=True + ) + parser_pr_closed.add_argument("-N", "--number", help="PR number.", required=True, type=int) + parser_pr_closed.add_argument( + "-m", "--merged", help="The PR was merged.", default=False, action="store_true" + ) + + parser_pr_ls_runs = subparsers.add_parser("pr-runs", help="List existing workflow runs for a PR.") + parser_pr_ls_runs.add_argument( + "-r", "--repository", help="Target GitHub repository (owner/repo).", required=True + ) + parser_pr_ls_runs.add_argument("-N", "--number", help="PR number.", required=True, type=int) + + parser_ls_runs = subparsers.add_parser( + "select-runs", help="List all workflow runs, or a subset matching an fzf filter." + ) + parser_ls_runs.add_argument( + "-r", "--repository", help="Target GitHub repository (owner/repo).", required=True + ) + parser_ls_runs.add_argument( + "-f", "--filter", help="Custom fzf filter to run in unattended mode.", default=None + ) + parser_ls_runs.add_argument( + "-i", + "--input", + help="Read entries from the specified file instead of querying GitHub. " + "Use - to read from stdin", + default=None, + ) + + parser_delete_runs = subparsers.add_parser( + "delete-runs", help="Delete all workflow runs, or a subset matching an fzf filter." 
+ ) + parser_delete_runs.add_argument( + "-r", "--repository", help="Target GitHub repository (owner/repo).", required=True + ) + parser_delete_runs.add_argument( + "-f", "--filter", help="Custom fzf filter to run in unattended mode.", default=None + ) + parser_delete_runs.add_argument( + "-i", + "--input", + help="Read entries from the specified file instead of querying GitHub. " + "Use - to read from stdin", + default=None, + ) + + parser_ls_pkgs = subparsers.add_parser( + "select-packages", help="List packages for an organization (or the current user)." + ) + parser_ls_pkgs.add_argument("-o", "--org", help="Target GitHub organization.", default=None) + parser_ls_pkgs.add_argument( + "-f", "--filter", help="Custom fzf filter to run in unattended mode.", default=None + ) + + parser_ls_versions = subparsers.add_parser( + "select-versions", + help="List versions for a package owned by an organization (or the current user).", + ) + parser_ls_versions.add_argument("-p", "--package", help="Target package.", required=True) + parser_ls_versions.add_argument("-o", "--org", help="Target GitHub organization.", default=None) + parser_ls_versions.add_argument( + "-f", "--filter", help="Custom fzf filter to run in unattended mode.", default=None + ) + + parser_delete_versions = subparsers.add_parser( + "delete-versions", help="Delete all versions of a package, or a subset matching an fzf filter." + ) + parser_delete_versions.add_argument("-p", "--package", help="Target package.", required=True) + parser_delete_versions.add_argument( + "-o", "--org", help="Target GitHub organization.", default=None + ) + parser_delete_versions.add_argument( + "-f", "--filter", help="Custom fzf filter to run in unattended mode.", default=None + ) + + return parser + + +############################################################################### +# Action dispatcher +############################################################################### +def dispatch_action(args: argparse.Namespace) -> None: + try: + if args.action == "pr-closed": + result = pr_closed( + repo=args.repository, pr_no=args.number, merged=args.merged, noop=args.noop + ) + tabulate_columns("action", *WorkflowRun._fields) + for removed, run in result: + output("DEL" if removed else "KEEP", str(run)) + elif args.action == "pr-runs": + tabulate_columns(*WorkflowRun._fields) + for run in pr_runs(repo=args.repository, pr_no=args.number): + output(str(run)) + elif args.action == "select-runs": + tabulate_columns(*WorkflowRun._fields) + for run in select_workflow_runs(repo=args.repository, filter=args.filter, input=args.input): + output(str(run)) + elif args.action == "delete-runs": + tabulate_columns(*WorkflowRun._fields) + for run in delete_workflow_runs( + repo=args.repository, filter=args.filter, noop=args.noop, input=args.input + ): + output(str(run)) + elif args.action == "select-packages": + tabulate_columns(*Package._fields) + for pkg in select_packages(org=args.org, filter=args.filter): + output(str(pkg)) + elif args.action == "select-versions": + tabulate_columns(*PackageVersion._fields) + for version in select_package_versions( + package=args.package, org=args.org, filter=args.filter + ): + output(str(version)) + elif args.action == "delete-versions": + tabulate_columns(*PackageVersion._fields) + for run in delete_package_versions( + package=args.package, + org=args.org, + filter=args.filter, + noop=args.noop, + ): + output(str(run)) + else: + raise RuntimeError("action not implemented", args.action) + finally: + if TabulateOutput: + 
TabulateOutput.stdin.close() + + +############################################################################### +# Script main() +############################################################################### +def main() -> None: + parser = define_parser() + args = parser.parse_args() + + if args.raw: + global TabulateEnabled + TabulateEnabled = False + + if args.interactive: + if ScriptNoninteractiveRequired: + raise RuntimeError("interactive requires a terminal") + global ScriptNoninteractive + ScriptNoninteractive = False + + if not args.action: + log.error("no action specified") + parser.print_help() + sys.exit(1) + + dispatch_action(args) + + +if __name__ == "__main__": + main() diff --git a/scripts/cleanup_closed_pull_request.sh b/scripts/cleanup_closed_pull_request.sh deleted file mode 100755 index 88b5f292..00000000 --- a/scripts/cleanup_closed_pull_request.sh +++ /dev/null @@ -1,165 +0,0 @@ -#!/bin/sh -e -if [ $# -ne 3 ]; then - printf -- "ERROR: invalid arguments\n" >&2 - printf -- "Usage: %s (true|false)\n" "$(basename $0)" >&2 - exit 1 -fi - -REPO="${1}" -PR_NO="${2}" -MERGED="${3:=false}" -UNO_DIR=$(cd $(dirname $0)/.. && pwd) - -if [ -n "${NOOP}" ]; then - OPT_NOOP="-e NOOP=y" -fi - -: "${GH_TOKEN:?GH_TOKEN is required but missing} -: "${ADMIN_IMAGE:=mentalsmash/uno-ci-admin:latest} - -log_msg() -{ - local lvl="${1}" - shift - printf -- "${lvl}: $@\n" >&2 -} - -RC=0 -case "${MERGED}" in - false) - PR_STATE=unmerged - log_msg INFO "deleting all runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - rc=0 - docker run --rm \ - -v ${UNO_DIR}:/uno \ - -e GH_TOKEN=${GH_TOKEN} \ - ${OPT_NOOP} \ - ${ADMIN_IMAGE} \ - /uno/scripts/cleanup_workflows.sh ${REPO} \ - "'PR #${REPO_NO} [" || rc=0 - if [ "${rc}" -ne 0 ]; then - log_msg ERROR "failed to delete all runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - RC=${rc} - else - log_msg INFO "deleted all runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - fi - ;; - true) - PR_STATE=merged - log_msg INFO "listing good 'basic validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - BASIC_VALIDATION_ALL=$( - docker run --rm \ - -v ${UNO_DIR}:/uno \ - -e GH_TOKEN=${GH_TOKEN} \ - -e NOOP=y \ - ${ADMIN_IMAGE} \ - /uno/scripts/cleanup_workflows.sh ${REPO} \ - "^GOOD PR #${PR_NO} [changed]" - ) - log_msg INFO "$(echo "${BASIC_VALIDATION_ALL}" | grep -v '^$' | wc -l) good 'basic validation' runs detected for ${PR_STATE} PR #${PR_NO} of ${REPO}" - log_msg INFO "----------------------------------------------------------------" - echo "${BASIC_VALIDATION_ALL}" | grep -v '^$' >&2 - log_msg INFO "----------------------------------------------------------------" - BASIC_VALIDATION_RUN="$(echo "${BASIC_VALIDATION_ALL}" | grep -v '^$' | tail -1)" - if [ -z "${BASIC_VALIDATION_RUN}" ]; then - log_msg ERROR "no good 'basic validation' run detected for ${PR_STATE} PR #${PR_NO} of ${REPO}" - exit 1 - else - BASIC_VALIDATION_DELETE="$(echo "${BASIC_VALIDATION_ALL}" | grep -v '^$' | head -n -1)" - log_msg INFO "- $(echo "${BASIC_VALIDATION_DELETE}" | wc -l) extra runs will be deleted" - fi - - log_msg INFO "listing good 'full validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - FULL_VALIDATION_ALL=$( - docker run --rm \ - -v ${UNO_DIR}:/uno \ - -e GH_TOKEN=${GH_TOKEN} \ - -e NOOP=y \ - ${ADMIN_IMAGE} \ - sh -c "/uno/scripts/cleanup_workflows.sh ${REPO} '^GOOD PR #${PR_NO} [reviewed, approved]'" - ) - log_msg INFO "$(echo "${FULL_VALIDATION_ALL}" | grep -v '^$' | wc -l) good 'full validation' runs detected for ${PR_STATE} PR #${PR_NO} of ${REPO}" - log_msg INFO 
"----------------------------------------------------------------" - echo "${FULL_VALIDATION_ALL}" | grep -v '^$' >&2 - log_msg INFO "----------------------------------------------------------------" - FULL_VALIDATION_RUN="$(echo "${FULL_VALIDATION_ALL}" | grep -v '^$' | tail -1)" - if [ -z "${FULL_VALIDATION_RUN}" ]; then - log_msg ERROR "no good 'full validation' run detected for ${PR_STATE} PR #${PR_NO} of ${REPO}" - exit 1 - else - FULL_VALIDATION_DELETE="$(echo "${FULL_VALIDATION_ALL}" | grep -v '^$' | head -n -1)" - log_msg INFO "- $(echo "${FULL_VALIDATION_DELETE}" | wc -l) extra runs will be deleted" - fi - - log_msg INFO "BASIC VALIDATION run: '${BASIC_VALIDATION_RUN}'" - log_msg INFO "FULL VALIDATION run: '${FULL_VALIDATION_RUN}'" - - log_msg INFO "deleting failed runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - rc=0 - docker run --rm \ - -v ${UNO_DIR}:/uno \ - -e GH_TOKEN=${GH_TOKEN} \ - ${OPT_NOOP} \ - ${ADMIN_IMAGE} \ - /uno/scripts/cleanup_workflows.sh ${REPO} \ - "^FAIL | ^cancelled 'PR #${PR_NO} [" || rc=$? - if [ "${rc}" -ne 0 ]; then - log_msg WARNING "failed to delete failed runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - RC=${rc} - else - log_msg INFO "DELETED failed runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - fi - - if [ -n "${BASIC_VALIDATION_DELETE}" ]; then - log_msg INFO "deleting extra 'basic validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - echo "${BASIC_VALIDATION_DELETE}" > .delete_runs.log - rc=0 - docker run --rm \ - -v $(pwd)/.delete_runs.log:/delete_runs.log \ - -v ${UNO_DIR}:/uno \ - -e GH_TOKEN=${GH_TOKEN} \ - ${OPT_NOOP} \ - ${ADMIN_IMAGE} \ - sh -c "cat /delete_runs.log | head -n -1 | RAW=y /uno/scripts/cleanup_workflows.sh ${REPO}" || rc=$? - rm .delete_runs.log - if [ "${rc}" -ne 0 ]; then - log_msg WARNING "failed to delete extra 'basic validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - RC=${rc} - else - log_msg INFO "DELETED extra 'basic validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - fi - fi - - if [ -n "${FULL_VALIDATION_DELETE}" ]; then - log_msg INFO "deleting extra 'full validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - echo "${FULL_VALIDATION_DELETE}" > .delete_runs.log - rc=0 - docker run --rm \ - -v $(pwd)/.delete_runs.log:/delete_runs.log \ - -v ${UNO_DIR}:/uno \ \ - -e GH_TOKEN=${GH_TOKEN} \ - ${OPT_NOOP} \ - ${ADMIN_IMAGE} \ - sh -c "cat /delete_runs.log | head -n -1 | RAW=y /uno/scripts/cleanup_workflows.sh ${REPO}" - rm .delete_runs.log - if [ "${rc}" -ne 0 ]; then - log_msg WARNING "failed to delete extra 'full validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - RC=${rc} - else - log_msg INFO "DELETED extra 'full validation' runs for ${PR_STATE} PR #${PR_NO} of ${REPO}" - fi - fi - ;; - *) - printf -- "ERROR: invalid MERGED value: '%s' (expected either 'true' or 'false')\n" "${MERGED}" >&2 - exit 1 - ;; -esac - -if [ "${RC}" -ne 0 ]; then - log_msg ERROR "errors encountered while processing ${PR_STATE} PR #${PR_NO} of ${REPO}" -else - log_msg INFO "finished processing ${PR_STATE} PR #${PR_NO} of ${REPO}" -fi - -exit ${RC} diff --git a/scripts/cleanup_images_gh.py b/scripts/cleanup_images_gh.py deleted file mode 100755 index 5c71685a..00000000 --- a/scripts/cleanup_images_gh.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/python3 -# MIT License - -# Copyright (c) 2020 Fiona Klute - -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, 
including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. - -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# PYTHON_ARGCOMPLETE_OK -import argparse -import getpass -import os -import requests -from datetime import datetime, timedelta - -__author__ = "Fiona Klute" -__version__ = "0.1" -__copyright__ = "Copyright (C) 2021 Fiona Klute" -__license__ = "MIT" - -# GitHub API documentation: https://docs.github.com/en/rest/reference/packages -github_api_accept = "application/vnd.github.v3+json" -# https://docs.github.com/en/rest/overview/api-versions?apiVersion=2022-11-28 -github_api_version = "2022-11-28" - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="List versions of a GHCR container image you own, and " - "optionally delete (prune) old, untagged versions." - ) - parser.add_argument( - "--token", - "-t", - action="store_true", - help="ask for token input instead of using the " "GHCR_TOKEN environment variable", - ) - parser.add_argument("--container", default="hello-ghcr-meow", help="name of the container image") - parser.add_argument("--verbose", "-v", action="store_true", help="print extra debug info") - parser.add_argument( - "--prune-age", - type=float, - metavar="DAYS", - default=None, - help="delete untagged images older than DAYS days", - ) - parser.add_argument( - "--dry-run", - "-n", - action="store_true", - help="do not actually prune images, just list which " "would be pruned", - ) - - # enable bash completion if argcomplete is available - try: - import argcomplete - - argcomplete.autocomplete(parser) - except ImportError: - pass - - args = parser.parse_args() - - if args.token: - token = getpass.getpass("Enter Token: ") - elif "GHCR_TOKEN" in os.environ: - token = os.environ["GHCR_TOKEN"] - else: - raise ValueError("missing authentication token") - - s = requests.Session() - s.headers.update( - { - "Authorization": f"token {token}", - "Accept": github_api_accept, - "X-GitHub-Api-Version": github_api_version, - } - ) - - del_before = ( - datetime.now().astimezone() - timedelta(days=args.prune_age) - if args.prune_age is not None - else None - ) - if del_before: - print(f"Pruning images created before {del_before}") - - list_url: str | None = ( - "https://api.github.com/user/packages/" f"container/{args.container}/versions" - ) - - while list_url is not None: - r = s.get(list_url) - if "link" in r.headers and "next" in r.links: - list_url = r.links["next"]["url"] - if args.verbose: - print(f"More result pages, next is <{list_url}>") - else: - list_url = None - - versions = r.json() - if args.verbose: - reset = datetime.fromtimestamp(int(r.headers["x-ratelimit-reset"])) - print(f'{r.headers["x-ratelimit-remaining"]} requests remaining ' f'until {reset}') - print(versions) - - for v in versions: - 
created = datetime.fromisoformat(v["created_at"]) - metadata = v["metadata"]["container"] - print(f'{v["id"]}\t{v["name"]}\t{created}\t{metadata["tags"]}') - - # prune old untagged images if requested - if del_before is not None and created < del_before and len(metadata["tags"]) == 0: - if args.dry_run: - print(f'would delete {v["id"]}') - else: - # r = s.delete - print( - 'https://api.github.com/user/packages/' f'container/{args.container}/versions/{v["id"]}' - ) - # r.raise_for_status() - print(f'deleted {v["id"]}') diff --git a/scripts/cleanup_workflows.sh b/scripts/cleanup_workflows.sh deleted file mode 100755 index 7c319ecc..00000000 --- a/scripts/cleanup_workflows.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env bash -# From: https://raw.githubusercontent.com/qmacro/dotfiles/230c6df494f239e9d1762794943847816e1b7c32/scripts/dwr -# Delete workflow runs - dwr - -# Given an "owner/repo" name, such as "qmacro/thinking-aloud", -# retrieve the workflow runs for that repo and present them in a -# list. Selected runs will be deleted. Uses the GitHub API. - -# Requires gh (GitHub CLI) and jq (JSON processor) - -# First version - -# (asorbini) Modified to take an optional filter argument to run in noninteractive mode - -declare REPO=${1:?No owner/repo specified} -FILTER="${2}" - -set -o errexit -set -o pipefail - - -jqscript() { - - cat <