From 3bf30ef06d7e483f88e3662202bfcbdad691f235 Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 24 Feb 2025 23:46:44 +0100 Subject: [PATCH] Add tools/backports.py to help with making patch releases The main purpose of this script is to automate the changelog transformations involved in backports. Many cherry-picks lead to changelog conflicts, and we also need to rearrange the changelog on the main branch. To make the backport branch, first we query the set of PRs that are tagged with 'needs-backport'. Then we sort them in chronological order by date merged. (This was annoying to do manually -- the github interface lets you sort PRs by creation date or last modified date but not by merge date.) Then we cherry-pick each commit in order by merge date and also render the change log automatically. If the cherry-pick succeeds, we write out the new change log and amend the commit. If it fails, we write out the change log and add it. We also check whether `pyodide-build` is in conflict, and if so we take the `pyodide-build` submodule commit from the commit being cherry-picked. Then we try `git cherry-pick --continue`. If it still fails, we abort and ask the user to resolve conflicts manually, run `git cherry-pick --continue` and then rerun the script. For this to work, we need to set `rerere.enabled` and `rerere.autoupdate`. We implement a parser for the changelog to perform the transformations needed. This is the most complicated part, since we want to maintain the ordering and the structure while moving entries from one section to another. We also add utilities for adding or removing the "needs backport" label and for showing which backport PRs are missing changelog entries. 
--- tools/backport.py | 658 +++++++++++++++++++++++++++++++++++ tools/tests/test_backport.py | 350 +++++++++++++++++++ 2 files changed, 1008 insertions(+) create mode 100755 tools/backport.py create mode 100644 tools/tests/test_backport.py diff --git a/tools/backport.py b/tools/backport.py new file mode 100755 index 00000000000..312c106ab28 --- /dev/null +++ b/tools/backport.py @@ -0,0 +1,658 @@ +#!/usr/bin/env python3 + +""" +The main purpose of this script is to automate the changelog transformations +involved in backports. Many cherry-picks lead to changelog conflicts, and we +need also to rearrange the changelog on the main branch. + +We implement a parser for the changelog to perform the transformations needed. +This is the most complicated part, since we want to maintain the ordering and +the structure while moving entries from one section to another. + +There are also some miscellaneous utilities for adding or removing the "needs +backport" label and for showing which backport PRs are missing changelog +entries. +""" + +import argparse +import functools +import re +import subprocess +import sys +from collections import namedtuple +from copy import deepcopy +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Self + +CHANGELOG = Path(__file__).parents[1] / "docs/project/changelog.md" +NEEDS_BACKPORTS_CACHE = Path(__file__).parent / "needs_backport_prs_cached.txt" + + +def run( + args: list[str | Path], check: bool = True, **kwargs: Any +) -> subprocess.CompletedProcess[Any]: + result = subprocess.run(args, check=False, text=True, **kwargs) + if check and result.returncode: + print(f"Command failed with exit status {result.returncode}") + print("Command was:", " ".join(str(x) for x in args)) + sys.exit(result.returncode) + return result + + +def fetch_needs_backport_pr_numbers(args) -> tuple[int, ...]: + """Use gh cli to collect the set of PRs that are labeled as needs_backport. + + Then cache them to disk. 
This is the implementation for fetch-backports. + """ + result = run( + ["gh", "pr", "list", "--label", "needs backport", "--state", "closed"], + capture_output=True, + ) + lines = [line.split("\t", 1)[0] for line in result.stdout.splitlines()] + NEEDS_BACKPORTS_CACHE.write_text("\n".join(lines) + "\n") + + +@functools.cache +def get_needs_backport_pr_numbers() -> tuple[int, ...]: + """Read the set of backports we need to make from disk.""" + if not NEEDS_BACKPORTS_CACHE.exists(): + print( + f"error: {NEEDS_BACKPORTS_CACHE} does not exist. First run:\n" + + " ./tools/backport.py fetch-backports", + file=sys.stdout, + ) + sys.exit(1) + lines = NEEDS_BACKPORTS_CACHE.read_text().splitlines() + return tuple(int(line) for line in lines) + + +# +# Commit log parsing +# + +# we use history_idx to sort by age. +CommitInfo = namedtuple( + "CommitInfo", ["pr_number", "shorthash", "shortlog", "history_idx"] +) + + +def commits_to_prs(commits: list[CommitInfo]) -> list[int]: + return [c.pr_number for c in commits] + +class CommitHistory: + """Store the history of the github PRs with a map from pr_number to CommitInfo""" + + commits: dict[int, CommitInfo] + + @classmethod + def from_git(self): + result = run(["git", "log", "--oneline", "main"], capture_output=True) + lines = result.stdout.splitlines() + return CommitHistory(lines) + + def __init__(self, lines): + commits = {} + PR_NUMBER_RE = re.compile(r"\(#[0-9]+\)$") + for history_idx, line in enumerate(lines): + if not (m := PR_NUMBER_RE.search(line)): + continue + pr_number = int(m.group(0)[2:-1]) + shorthash, shortlog = line.split(" ", 1) + commits[pr_number] = CommitInfo(pr_number, shorthash, shortlog, history_idx) + + self.commits = commits + + def lookup_pr(self, pr_number: int) -> CommitInfo: + return self.commits[pr_number] + + +@functools.cache +def get_commits() -> list[CommitInfo]: + """Return the CommitInfo of the PRs we want to backport""" + pr_numbers = get_needs_backport_pr_numbers() + commit_history = 
CommitHistory.from_git() + commits = [commit_history.lookup_pr(x) for x in pr_numbers] + return sorted(commits, key=lambda c: -c.history_idx) + + +# +# Changelog parsing +# +# See tests in tools/tests/test_backports.py. + + +@dataclass +class ChangelogEntry: + """A changelog entry, represented as a list of strings. + + An entry is started by a line beginning with `-`. It ends when there is a + line starting with `#` (begins a new section/subsection), a blank line + (begins a new subsubsection) or `-` (begins a new entry). + + This is nearly the same thing as it's content. + """ + + content: list[str] = field(default_factory=list) + + def get_text(self) -> str: + if self.content: + return "\n".join(self.content) + "\n" + return "" + + def __bool__(self) -> bool: + return bool(self.content) + + def append(self, line: str) -> None: + self.content.append(line) + + +@dataclass +class ChangelogSubSubSection: + header: list[str] = field(default_factory=list) + entries: list[ChangelogEntry] = field(default_factory=list) + cur_entry: ChangelogEntry = field(default_factory=ChangelogEntry) + + def get_text(self) -> str: + """Unparse the subsubsection""" + header = "" + if self.header: + header = "\n".join(self.header) + "\n" + res = header + "".join(x.get_text() for x in self.entries) + # Special case: if the last entry already ends in a blank line, we don't + # add another one. This keeps the spacing more consistent with the + # backported entries. + if not res.endswith("\n\n"): + res += "\n" + return res + + def __bool__(self) -> bool: + return bool(self.header or self.entries or self.cur_entry) + + def append(self, line: str) -> None: + """Main parsing logic.""" + if line.startswith("-"): + self.finish_entry() + if self.cur_entry or line.startswith("-"): + self.cur_entry.append(line) + else: + self.header.append(line) + + def finish_entry(self) -> None: + """If cur_entry is nonempty, add it to entries. 
Then empty out cur_entry""" + if self.cur_entry: + self.entries.append(self.cur_entry) + self.cur_entry = ChangelogEntry() + + +@dataclass +class ChangelogSubSection: + """A changelog subsection + + Introduced by ### or ##. Ends when there is another line with ### or ##. + + header: + Consists of all the lines starting with and the subsection start "###" + line and including all content lines that do not start with -. Generally + this will be ### plus one or more empty lines. + + subsubsection: + The list of subsubsections. + + subsubsection: + Parser state. + """ + + header: list[str] = field(default_factory=list) + subsubsections: list[ChangelogSubSubSection] = field(default_factory=list) + cur_subsubsection: ChangelogSubSubSection = field(default_factory=ChangelogSubSubSection) + + def get_text(self) -> str: + """Unparse the subsection""" + header = "" + if self.header: + header = "\n".join(self.header) + "\n" + res = header + "".join(x.get_text() for x in self.subsubsections) + # Special case: if the last entry already ends in a blank line, we don't + # add another one. This keeps the spacing more consistent with the + # backported entries. + if not res.endswith("\n\n"): + res += "\n" + return res + + def __bool__(self) -> bool: + return bool(self.header or self.subsubsections or self.cur_subsubsection) + + def append(self, line: str) -> None: + """Main parsing logic.""" + if line.strip() == "": + if self.cur_subsubsection: + self.finish_subsubsection() + else: + self.header.append(line) + return + if self.cur_subsubsection or line.startswith("-"): + self.cur_subsubsection.append(line) + else: + self.header.append(line) + + def finish_subsubsection(self) -> None: + """If cur_subsubsection is nonempty, add it to entries. 
Then empty out cur_subsubsection""" + if self.cur_subsubsection: + self.cur_subsubsection.finish_entry() + self.subsubsections.append(self.cur_subsubsection) + self.cur_subsubsection = ChangelogSubSubSection() + + +PrChangelogIndex = namedtuple("PrChangelogIndex", ["subsection", "subsubsection", "entry", "is_unique"]) + + +@dataclass +class ChangelogSection: + """A changelog subsection + + Introduced by ##. Ends when there is a ##. + + header: + Other than the unreleased section we don't actually bother parsing out + the changelog. So for the "prelude" and "rest" sections, this is + actually all the content. + + For the unreleased and patch_release sections, this is only the content + up to the first entry or subsection. So that should include just the `## + Unreleased` line and a blank line or two. + + subsections: + The list of subsections. + + cur_subsection: + Parser state. + + pr_index: + For the unreleased section, we populate this with information about + where the release note for each PR is. Populated by create_pr_index(). + """ + + header: list[str] = field(default_factory=list) + subsections: list[ChangelogSubSection] = field(default_factory=list) + cur_subsection: ChangelogSubSection = field(default_factory=ChangelogSubSection) + pr_index: dict[int, PrChangelogIndex] = field(default_factory=dict) + + def get_text(self) -> str: + """Unparse the section""" + header = "" + if self.header: + header = "\n".join(self.header) + "\n" + return header + "".join(x.get_text() for x in self.subsections) + + def append(self, line: str) -> None: + """Main parsing logic.""" + if line.startswith("### "): + self.finish_subsection() + if self.cur_subsection or line.startswith(("-", "### ")): + self.cur_subsection.append(line) + else: + self.header.append(line) + + def append_lines(self, lines: list[str]) -> None: + for line in lines: + self.append(line) + + def finish_subsection(self) -> None: + """If cur_subsection is nonempty, add it to entries. 
Then empty out cur_entry""" + if self.cur_subsection: + self.cur_subsection.finish_subsubsection() + self.subsections.append(self.cur_subsection) + self.cur_subsection = ChangelogSubSection() + + def create_pr_index(self) -> None: + PR_NUMBER_RE = re.compile(r"{pr}`[0-9]+`") + for subsection_idx, subsection in enumerate(self.subsections): + for subsubsection_idx, subsubsection in enumerate(subsection.subsubsections): + for entry_idx, entry in enumerate(subsubsection.entries): + pr_strs = PR_NUMBER_RE.findall(entry.get_text()) + is_unique = len(pr_strs) == 1 + for pr_str in pr_strs: + pr = int(pr_str[5:-1]) + self.pr_index[pr] = PrChangelogIndex( + subsection_idx, subsubsection_idx, entry_idx, is_unique + ) + + def delete_entry(self, pr_changelog_index: PrChangelogIndex) -> None: + subsection = self.subsections[pr_changelog_index.subsection] + subsubsection = subsection.subsubsections[pr_changelog_index.subsubsection] + del subsubsection.entries[pr_changelog_index.entry] + if not subsubsection.entries: + del subsection.subsubsections[pr_changelog_index.subsubsection] + if not subsection.subsubsections: + del self.subsections[pr_changelog_index.subsection] + + +@dataclass +class Changelog: + """Class for keeping track of an item in inventory.""" + + file: Path | None = None + prelude: ChangelogSection = field(default_factory=ChangelogSection) + unreleased: ChangelogSection = field(default_factory=ChangelogSection) + patch_release: ChangelogSection = field(default_factory=ChangelogSection) + rest: ChangelogSection = field(default_factory=ChangelogSection) + + @classmethod + def from_file(cls, file): + return Changelog(file).parse(file.read_text()) + + @classmethod + def from_text(cls, text): + return Changelog().parse(text) + + def parse(self, changelog_text: str) -> Self: + changelog = changelog_text.splitlines() + + it = iter(changelog) + for line in it: + if line.startswith("## Unreleased"): + self.unreleased.header.append(line) + break + # We don't care what's in 
the prelude so it all goes in the header + self.prelude.header.append(line) + # Parse unreleased section + for line in it: + if line.startswith("## "): + self.unreleased.finish_subsection() + self.rest.header.append(line) + break + self.unreleased.append(line) + + # We don't care what's in the rest so it all goes in the header + self.rest.header.extend(it) + return self + + def get_text(self, include_unreleased=True): + # For the backports changelog we want to drop the unreleased section + # entirely. + unreleased = self.unreleased.get_text() if include_unreleased else "" + return ( + self.prelude.get_text() + + unreleased + + self.patch_release.get_text() + + self.rest.get_text() + ) + + def write_text(self, include_unreleased=True): + assert self.file + self.file.write_text(self.get_text(include_unreleased=include_unreleased)) + + def set_patch_release_notes(self, version: str, pr_numbers: list[int]) -> None: + """Given a list of PRs, check if they have a changelog entry in + "Unreleased". + + If so add the entry to the patch_release section. Don't remove the entry + from the unreleased section, just duplicate it. 
+ """ + self.patch_release = ChangelogSection() + self.patch_release.append_lines( + [f"## Version {version}", "", "_Insert Date Here_", ""] + ) + backport_subsections = {} + backport_subsubsections = {} + + # Sort by order of appearance then add + changelog_indices = [ + pr_index + for pr_number in pr_numbers + if (pr_index := self.unreleased.pr_index.get(pr_number, None)) + ] + + changelog_indices = sorted( + changelog_indices, key=lambda idx: (idx.subsection, idx.subsubsection, idx.entry) + ) + for pr_index in changelog_indices: + subsection = self.unreleased.subsections[pr_index.subsection] + if pr_index.subsection in backport_subsections: + backport_subsection = backport_subsections[pr_index.subsection] + else: + backport_subsection = deepcopy(subsection) + backport_subsection.subsubsections = [] + backport_subsections[pr_index.subsection] = backport_subsection + self.patch_release.subsections.append(backport_subsection) + + subsubsection = subsection.subsubsections[pr_index.subsubsection] + subsub_index = (pr_index.subsection, pr_index.subsubsection) + if subsub_index in backport_subsubsections: + backport_subsubsection = backport_subsubsections[subsub_index] + else: + backport_subsubsection = deepcopy(subsubsection) + backport_subsubsection.entries = [] + backport_subsubsections[subsub_index] = backport_subsubsection + backport_subsection.subsubsections.append(backport_subsubsection) + + entry = subsubsection.entries[pr_index.entry] + backport_subsubsection.entries.append(entry) + + + def remove_release_notes_from_unreleased_section( + self, pr_numbers: list[int] + ) -> None: + # Have to do this in two passes: + # 1. collect up entries to delete + indices_to_delete = [] + for pr_number in pr_numbers: + if pr_index := self.unreleased.pr_index.get(pr_number, None): + indices_to_delete.append(pr_index) + + # 2. Sort by reverse order of appearance and then delete. 
#
# Main commands
#


def add_backport_pr(args) -> None:
    """Add the 'needs backport' label to a PR, then refresh the local cache."""
    pr_number_str = args.pr_number
    run(
        [
            "gh",
            "pr",
            "edit",
            pr_number_str.removeprefix("#"),
            "--add-label",
            "needs backport",
        ]
    )
    fetch_needs_backport_pr_numbers(None)


def remove_needs_backport_labels(args) -> None:
    """Remove the 'needs backport' label from every cached PR (post-release)."""
    for pr_number in get_needs_backport_pr_numbers():
        run(["gh", "pr", "edit", str(pr_number), "--remove-label", "needs backport"])


def show_missing_changelogs(args) -> None:
    """Print (or open in a browser with -w) backport PRs that have no
    changelog entry in the Unreleased section."""
    changelog = Changelog.from_file(CHANGELOG)
    changelog.unreleased.create_pr_index()
    commits = get_commits()
    missing_changelogs = [
        commit
        for commit in commits
        if commit.pr_number not in changelog.unreleased.pr_index
    ]
    for commit in missing_changelogs:
        if args.web:
            run(["gh", "pr", "view", "-w", str(commit.pr_number)])
        else:
            print(commit.pr_number, commit.shorthash, commit.shortlog)


def make_changelog_branch(args) -> None:
    """Create the changelog-for-<version>-tmp branch off main.

    Moves the backported PRs' release notes out of the Unreleased section into
    a new patch-release section and commits the result.
    """
    commits = get_commits()
    version = args.new_version
    run(["git", "fetch", "upstream", "main:main"])
    changelog = Changelog.from_file(CHANGELOG)
    changelog.unreleased.create_pr_index()
    run(["git", "switch", "main"])
    run(["git", "switch", "-C", f"changelog-for-{version}-tmp"])
    changelog.set_patch_release_notes(version, commits_to_prs(commits))
    changelog.remove_release_notes_from_unreleased_section(commits_to_prs(commits))
    changelog.write_text()
    run(["git", "add", CHANGELOG])
    run(["git", "commit", "-m", f"Update changelog for v{version}"])


def make_backport_branch(args) -> None:
    """Create the backports-for-<version>-tmp branch off stable.

    To make the backport branch, first we query the set of PRs that are tagged
    with 'needs-backport'. Then we sort them in chronological order by date
    merged. (This was annoying to do manually -- the github interface lets you
    sort PRs by creation date or last modified date but not by merge date.)

    Then we cherry-pick each commit in order by merge date but also render the
    change log automatically. If the cherry-pick succeeds, we write out the new
    change log and amend the commit. If it fails, we write out the change log
    and add it. We also check if pyodide-build is in conflict and if so we take
    the `pyodide-build` submodule commit from the commit being cherry-picked.
    Then we try `git cherry-pick --continue`. If it still fails, we abort and
    ask the user to resolve conflicts manually, run `git cherry-pick
    --continue` and then rerun the script. For this to work, we need to set
    `rerere.enabled` and `rerere.autoupdate`.
    """
    commits = get_commits()
    version = args.new_version
    run(["git", "fetch", "upstream", "main:main"])
    run(["git", "fetch", "upstream", "stable:stable"])
    run(["git", "config", "rerere.enabled", "true"])
    run(["git", "config", "rerere.autoupdate", "true"])
    # Parse the changelog from main before switching to stable.
    changelog = Changelog.from_file(CHANGELOG)
    changelog.unreleased.create_pr_index()
    run(["git", "switch", "stable"])
    run(["git", "submodule", "update"])
    run(["git", "switch", "-C", f"backports-for-{version}-tmp"])
    for n, cur_commit in enumerate(commits):
        result = run(
            ["git", "-c", "core.editor=true", "cherry-pick", cur_commit.shorthash],
            check=False,
            capture_output=True,
        )
        for line in result.stdout.splitlines():
            # We need to resolve submodule conflicts ourselves. We always pick
            # the submodule version from the commit we are cherry-picking.
            if not line.startswith("CONFLICT (submodule)"):
                continue
            path = line.partition("Merge conflict in ")[-1]
            run(
                ["git", "checkout", cur_commit.shorthash, "--", path],
                capture_output=True,
            )
        # Render the changelog containing every backport up to and including
        # this one, dropping the Unreleased section entirely.
        changelog.set_patch_release_notes(version, commits_to_prs(commits[: n + 1]))
        changelog.write_text(include_unreleased=False)
        run(["git", "add", "docs/project/changelog.md"])
        if result.returncode == 0:
            run(["git", "commit", "--amend"])
        else:
            result2 = run(
                ["git", "cherry-pick", "--continue", "--no-edit"], check=False
            )
            if result2.returncode:
                print("\n\n")
                print("\033[1;33mCherry-pick failed:\033[m")
                print(" ", cur_commit.shortlog)
                print("Resolve conflicts and run `git cherry-pick --continue` then rerun.")
                sys.exit(result2.returncode)


def open_release_prs(args) -> None:
    """Open the two release PRs (backports -> stable, changelog -> main)."""
    version = args.new_version
    INSERT_ACTUAL_DATE = "- [ ] Insert the actual date in the changelog\n"
    # Use a well-formed markdown checkbox ("- [ ]", not "- []").
    MERGE_DONT_SQUASH = "- [ ] Merge, don't squash"
    BACKPORTS_BRANCH = f"backports-for-{version}-tmp"
    CHANGELOG_BRANCH = f"changelog-for-{version}-tmp"

    run(["git", "switch", BACKPORTS_BRANCH])
    run(
        [
            "gh",
            "pr",
            "create",
            "--base",
            "stable",
            "--title",
            f"Backports for v{version}",
            "--body",
            INSERT_ACTUAL_DATE + MERGE_DONT_SQUASH,
            "--web",
        ]
    )

    run(["git", "switch", CHANGELOG_BRANCH])
    run(
        [
            "gh",
            "pr",
            "create",
            "--base",
            "main",
            "--title",
            f"Changelog for v{version}",
            "--body",
            INSERT_ACTUAL_DATE,
            "--web",
        ]
    )


def parse_args():
    """Build the argument parser with one subcommand per main command."""
    parser = argparse.ArgumentParser("Apply backports")
    # With no subcommand, print help instead of crashing on a missing func.
    parser.set_defaults(func=lambda args: parser.print_help())
    subparsers = parser.add_subparsers()

    add_backport_parser = subparsers.add_parser(
        "add-backport-pr", help="Add the needs-backport label to a PR"
    )
    add_backport_parser.add_argument("pr_number")
    add_backport_parser.set_defaults(func=add_backport_pr)

    fetch_backports_parser = subparsers.add_parser(
        "fetch-backports",
        help="Fetch the list of PRs with the 'needs backport' label and cache to disk. Must be run first.",
    )
    fetch_backports_parser.set_defaults(func=fetch_needs_backport_pr_numbers)

    missing_changelogs_parser = subparsers.add_parser(
        "missing-changelogs",
        help="List the PRs labeled as 'needs backport' that don't have a changelog",
    )
    missing_changelogs_parser.add_argument(
        "-w", "--web", action="store_true", help="Open missing changelog prs in browser"
    )
    missing_changelogs_parser.set_defaults(func=show_missing_changelogs)

    changelog_branch_parse = subparsers.add_parser(
        "changelog-branch", help="Make changelog-for-version branch"
    )
    changelog_branch_parse.add_argument("new_version")
    changelog_branch_parse.set_defaults(func=make_changelog_branch)

    backport_branch_parse = subparsers.add_parser(
        "backport-branch", help="Make backports-for-version branch"
    )
    backport_branch_parse.add_argument("new_version")
    backport_branch_parse.set_defaults(func=make_backport_branch)

    open_release_prs_parse = subparsers.add_parser(
        "open-release-prs", help="Open PRs for the backports and changelog branches"
    )
    open_release_prs_parse.add_argument("new_version")
    open_release_prs_parse.set_defaults(func=open_release_prs)

    return parser.parse_args()
def main():
    """Entry point: parse arguments and dispatch to the chosen subcommand."""
    args = parse_args()
    args.func(args)


if __name__ == "__main__":
    main()


# --- tools/tests/test_backport.py ---

import sys
from pathlib import Path
from textwrap import dedent
from copy import deepcopy

# Make tools/ importable so backport.py can be imported directly.
sys.path.append(str(Path(__file__).parents[1]))
from backport import (
    Changelog,
    ChangelogEntry,
    ChangelogSection,
    ChangelogSubSection,
    ChangelogSubSubSection,
    PrChangelogIndex,
    run,
)


# A small but representative changelog: an unlabeled subsection with two
# subsubsections, a "### Packages" subsection, and a previous release header.
TEST_CHANGELOG = dedent(
    """\
    # Change Log

    ## Unreleased

    - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`
    - Added `jiter` 0.8.2 {pr}`5388`

    - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`
    - {{ Fix }} Since 0.27.1, Pyodide has been broken in iOS because iOS ships
    broken wasm-gc support. Pyodide feature detects whether the runtime supports
    wasm-gc and uses it if it is present. Unfortunately, iOS passes the feature
    detection but wasm-gc doesn't work as expected. {pr}`5445`

    ### Packages

    - Added `h3` 4.2.1 {pr}`5436`
    - Added `pcodec` 0.3.3 {pr}`5432`

    - {{ Breaking }} `matplotlib-pyodide` is not a default backend for matplotlib anymore.
    Users who want to use `matplotlib-pyodide` need to explicitly call
    `matplotlib.use("module://matplotlib_pyodide.wasm_backend")`.
    {pr}`5374`

    ## Version 0.27.2
    """
)


def make_entry(*lines):
    # Helper: build a ChangelogEntry from its content lines.
    return ChangelogEntry(content=list(lines))


def make_subsubsection(*entries):
    # Helper: build a headerless ChangelogSubSubSection from entries.
    return ChangelogSubSubSection(entries=list(entries))


def get_expected_changelog():
    """The hand-built parse tree that TEST_CHANGELOG should parse to."""
    unlabeled = ChangelogSubSection(
        subsubsections=[
            make_subsubsection(
                make_entry(
                    "- ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`",
                ),
                make_entry(
                    "- Added `jiter` 0.8.2 {pr}`5388`",
                ),
            ),
            make_subsubsection(
                make_entry(
                    "- {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`",
                ),
                make_entry(
                    "- {{ Fix }} Since 0.27.1, Pyodide has been broken in iOS because iOS ships",
                    "broken wasm-gc support. Pyodide feature detects whether the runtime supports",
                    "wasm-gc and uses it if it is present. Unfortunately, iOS passes the feature",
                    "detection but wasm-gc doesn't work as expected. {pr}`5445`",
                ),
            ),
        ],
    )
    packages = ChangelogSubSection(
        header=["### Packages", ""],
        subsubsections=[
            make_subsubsection(
                make_entry(
                    "- Added `h3` 4.2.1 {pr}`5436`",
                ),
                make_entry(
                    "- Added `pcodec` 0.3.3 {pr}`5432`",
                ),
            ),
            make_subsubsection(
                make_entry(
                    "- {{ Breaking }} `matplotlib-pyodide` is not a default backend for matplotlib anymore.",
                    "Users who want to use `matplotlib-pyodide` need to explicitly call",
                    '`matplotlib.use("module://matplotlib_pyodide.wasm_backend")`.',
                    "{pr}`5374`",
                ),
            ),
        ],
    )
    return Changelog(
        prelude=ChangelogSection(header=["# Change Log", ""]),
        unreleased=ChangelogSection(
            header=["## Unreleased", ""],
            subsections=[unlabeled, packages],
        ),
        rest=ChangelogSection(header=["## Version 0.27.2"]),
    )
def test_roundtrip():
    """Parsing then unparsing must reproduce the input byte-for-byte."""
    parsed = Changelog.from_text(TEST_CHANGELOG)
    assert parsed.get_text() == TEST_CHANGELOG


def test_parsed():
    """The parse tree must match the hand-built expected structure."""
    parsed = Changelog.from_text(TEST_CHANGELOG)
    assert parsed == get_expected_changelog()


def test_unparse():
    """get_text() of each nesting level produces the expected markdown."""
    changelog = get_expected_changelog()
    unreleased = changelog.unreleased
    [unlabeled, packages] = unreleased.subsections
    assert unlabeled.subsubsections[0].entries[0].get_text() == dedent(
        """\
        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`
        """
    )
    assert unlabeled.subsubsections[0].get_text() == dedent(
        """\
        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`
        - Added `jiter` 0.8.2 {pr}`5388`

        """
    )
    assert unlabeled.get_text() == dedent(
        """\
        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`
        - Added `jiter` 0.8.2 {pr}`5388`

        - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`
        - {{ Fix }} Since 0.27.1, Pyodide has been broken in iOS because iOS ships
        broken wasm-gc support. Pyodide feature detects whether the runtime supports
        wasm-gc and uses it if it is present. Unfortunately, iOS passes the feature
        detection but wasm-gc doesn't work as expected. {pr}`5445`

        """
    )
    assert packages.get_text() == dedent(
        """\
        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        - {{ Breaking }} `matplotlib-pyodide` is not a default backend for matplotlib anymore.
        Users who want to use `matplotlib-pyodide` need to explicitly call
        `matplotlib.use("module://matplotlib_pyodide.wasm_backend")`.
        {pr}`5374`

        """
    )


def test_pr_index():
    """create_pr_index locates every PR; multi-PR entries are not unique."""
    changelog = Changelog.from_text(TEST_CHANGELOG)
    unreleased = changelog.unreleased
    unreleased.create_pr_index()
    assert unreleased.pr_index == {
        5343: PrChangelogIndex(0, 0, 0, False),
        5350: PrChangelogIndex(0, 0, 0, False),
        5374: PrChangelogIndex(1, 1, 0, True),
        5388: PrChangelogIndex(0, 0, 1, True),
        5432: PrChangelogIndex(1, 0, 1, True),
        5434: PrChangelogIndex(0, 1, 0, True),
        5436: PrChangelogIndex(1, 0, 0, True),
        5445: PrChangelogIndex(0, 1, 1, True),
    }


def test_add_backported_entries():
    """set_patch_release_notes copies the right entries, in document order,
    preserving subsection headers and blank-line grouping."""
    changelog = Changelog.from_text(TEST_CHANGELOG)
    changelog.unreleased.create_pr_index()
    changelog.set_patch_release_notes("0.27.3", [5388])
    assert changelog.patch_release.get_text() == dedent(
        """\
        ## Version 0.27.3

        _Insert Date Here_

        - Added `jiter` 0.8.2 {pr}`5388`

        """
    )
    changelog.set_patch_release_notes("0.27.3", [5436])
    assert changelog.patch_release.get_text() == dedent(
        """\
        ## Version 0.27.3

        _Insert Date Here_

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`

        """
    )
    changelog.set_patch_release_notes("0.27.3", [5436, 5432])
    assert changelog.patch_release.get_text() == dedent(
        """\
        ## Version 0.27.3

        _Insert Date Here_

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )
    # Input order must not matter: entries come out in document order.
    changelog.set_patch_release_notes("0.27.3", [5432, 5436])
    assert changelog.patch_release.get_text() == dedent(
        """\
        ## Version 0.27.3

        _Insert Date Here_

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )
    changelog.set_patch_release_notes("0.27.3", [5388, 5434])
    assert changelog.patch_release.get_text() == dedent(
        """\
        ## Version 0.27.3

        _Insert Date Here_

        - Added `jiter` 0.8.2 {pr}`5388`

        - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`

        """
    )

    changelog.set_patch_release_notes("0.27.3", [5432, 5388, 5434, 5436])
    assert changelog.patch_release.get_text() == dedent(
        """\
        ## Version 0.27.3

        _Insert Date Here_

        - Added `jiter` 0.8.2 {pr}`5388`

        - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )
def test_remove_backported_entries():
    """remove_release_notes_from_unreleased_section deletes entries and prunes
    empty subsubsections/subsections, leaving the rest untouched."""
    orig_changelog = Changelog.from_text(TEST_CHANGELOG)
    orig_changelog.unreleased.create_pr_index()

    changelog = deepcopy(orig_changelog)
    changelog.remove_release_notes_from_unreleased_section([5374, 5445])
    assert changelog.unreleased.get_text() == dedent(
        """\
        ## Unreleased

        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`
        - Added `jiter` 0.8.2 {pr}`5388`

        - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )

    # Removing the whole second subsubsection prunes it entirely.
    changelog = deepcopy(orig_changelog)
    changelog.remove_release_notes_from_unreleased_section([5374, 5445, 5434])
    assert changelog.unreleased.get_text() == dedent(
        """\
        ## Unreleased

        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`
        - Added `jiter` 0.8.2 {pr}`5388`

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )
    changelog = deepcopy(orig_changelog)
    changelog.remove_release_notes_from_unreleased_section([5374, 5445, 5388])
    assert changelog.unreleased.get_text() == dedent(
        """\
        ## Unreleased

        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`

        - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )
    # Emptying the whole unlabeled subsection prunes the subsection too.
    changelog = deepcopy(orig_changelog)
    changelog.remove_release_notes_from_unreleased_section([5374, 5445, 5388, 5343, 5434])
    assert changelog.unreleased.get_text() == dedent(
        """\
        ## Unreleased

        ### Packages

        - Added `h3` 4.2.1 {pr}`5436`
        - Added `pcodec` 0.3.3 {pr}`5432`

        """
    )
    # Emptying the Packages subsection removes it, header included.
    changelog = deepcopy(orig_changelog)
    changelog.remove_release_notes_from_unreleased_section([5374, 5445, 5388, 5436, 5432])
    assert changelog.unreleased.get_text() == dedent(
        """\
        ## Unreleased

        - ABI break: Upgraded Emscripten to 3.1.63 {pr}`5343` {pr}`5350`

        - {{ Fix }} `mountNativeFS` API now correctly propagates the error. {pr}`5434`

        """
    )