Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 19 additions & 6 deletions RELEASE_NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,23 +2,36 @@

## Summary

This is a maintenance, template-only, bugfix release.
<!-- Here goes a general summary of what this release is about -->

## Upgrading

<!-- Here goes notes on how to upgrade from previous versions, including deprecations and what they should be replaced with -->

### Cookiecutter template

All upgrading should be done via the migration script or by regenerating the templates.

```bash
curl -sSL https://raw.githubusercontent.com/frequenz-floss/frequenz-repo-config-python/v0.16.0/cookiecutter/migrate.py | python3
curl -sSL https://raw.githubusercontent.com/frequenz-floss/frequenz-repo-config-python/<tag>/cookiecutter/migrate.py | python3
```

But you might still need to adapt your code:

<!-- Here upgrade steps for cookiecutter specifically -->

## New Features

<!-- Here goes the main new features and examples or instructions on how to use them -->

### Cookiecutter template

<!-- Here new features for cookiecutter specifically -->

## Bug Fixes

<!-- Here goes notable bug fixes that are worth a special mention or explanation -->

### Cookiecutter template

- Added a migration step for api repositories to fix `mkdocs.yml` when the previous `mkdocstrings-python` v2 migration moved only `paths: ["src"]` under `handlers.python.options` but not `paths: ["py"]`.
- Fixed runners for jobs that require Docker and were wrongly converted to `ubuntu-slim` in v0.15.0, changing them back to `ubuntu-24.04` to avoid Docker-related failures. The template and the migration script were both updated to reflect this change.
- Updated the repo-config migration workflow template and migration script so existing repositories also add the `merge_group` trigger and skip the job unless the event is `pull_request_target`, allowing the workflow to be used as a required merge-queue check.
- Added a migration step to remove the copilot review request from the Protect version branch protection rules. This was supposed to have been done in v0.15.0, but that migration step was wrong and didn't update the rules properly.
<!-- Here bug fixes for cookiecutter specifically -->
311 changes: 2 additions & 309 deletions cookiecutter/migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@
And remember to follow any manual instructions for each run.
""" # noqa: E501

# pylint: disable=too-many-lines, too-many-locals, too-many-branches
# R0801 is similarity detection, as the template is always similar to the current script
# pylint: disable=too-many-lines, too-many-locals, too-many-branches, R0801

import hashlib
import json
Expand All @@ -38,18 +39,6 @@ def main() -> None:
"""Run the migration steps."""
# Add a separation line like this one after each migration step.
print("=" * 72)
print("Fixing repo-config migration merge queue trigger...")
migrate_repo_config_migration_merge_group_trigger()
print("=" * 72)
print("Fixing mkdocstrings-python v2 paths for api repos...")
migrate_api_mkdocs_mkdocstrings_paths()
print("=" * 72)
print("Migrating protolint and publish-to-pypi runners to ubuntu-24.04...")
migrate_docker_based_runners()
print("=" * 72)
print("Updating 'Protect version branches' GitHub ruleset...")
migrate_protect_version_branches_ruleset()
print("=" * 72)
print()

if _manual_steps:
Expand All @@ -72,302 +61,6 @@ def main() -> None:
print()


def migrate_api_mkdocs_mkdocstrings_paths() -> None:
    """Fix the mkdocstrings paths migration for api repositories."""
    # Only api projects carry the mkdocstrings `paths: ["py"]` setting, so
    # look up the project type first and bail out early when it doesn't apply.
    repo_type = read_cookiecutter_str_var("type")
    if repo_type is None:
        manual_step(
            "Unable to detect the cookiecutter project type from "
            ".cookiecutter-replay.json; if this is an api project and "
            '`mkdocs.yml` still has `paths: ["py"]` nested under '
            "`handlers.python.options`, move it out of `options`."
        )
        return
    if repo_type != "api":
        print(" Skipping mkdocs.yml (not an api project)")
        return

    mkdocs_file = Path("mkdocs.yml")
    if not mkdocs_file.exists():
        manual_step(
            "Unable to find mkdocs.yml; if this project uses mkdocs, "
            'make sure the `paths: ["py"]` config is under '
            "`handlers.python`, not `handlers.python.options`."
        )
        return

    # Three recognizable states of the file: the broken nesting produced by
    # the old migration, the already-fixed form, and the current template.
    broken_pattern = ' options:\n paths: ["py"]'
    fixed_pattern = ' paths: ["py"]\n options:'
    template_pattern = (
        ' handlers:\n paths: ["py"]\n python:\n options:'
    )
    text = mkdocs_file.read_text(encoding="utf-8")

    if broken_pattern in text:
        # Move the paths setting out of `options`, touching only the first hit.
        replace_file_contents_atomically(
            mkdocs_file, broken_pattern, fixed_pattern, count=1
        )
        print(f" Updated {mkdocs_file}: moved mkdocstrings api paths out of options")
    elif fixed_pattern in text or template_pattern in text:
        print(f" Skipped {mkdocs_file}: mkdocstrings api paths already updated")
    else:
        # Unrecognized layout: let a human sort it out.
        manual_step(
            f"Could not find the api mkdocstrings path pattern in {mkdocs_file}. "
            'If `paths: ["py"]` is still nested under `handlers.python.options`, '
            "move it out of `options` according to the latest template."
        )


def migrate_docker_based_runners() -> None:
    """Migrate Docker-based jobs to use ubuntu-24.04 runners.

    The ``protolint`` and ``publish-to-pypi`` jobs need Docker, which is not
    available on ``ubuntu-slim``. They should therefore run on
    ``ubuntu-24.04`` instead.
    """
    workflows_dir = Path(".github") / "workflows"
    # Replacement snippets matching the current template (runner pinned to
    # ubuntu-24.04). These are matched/substituted as literal text, so they
    # must mirror the workflow files exactly.
    protolint_new = (
        " protolint:\n"
        " name: Check proto files with protolint\n"
        " runs-on: ubuntu-24.04"
    )
    publish_to_pypi_new = (
        ' needs: ["create-github-release"]\n runs-on: ubuntu-24.04'
    )
    # Maps workflow filename -> list of replacement rules to apply to it.
    # Each rule records the job name, who needs it, a marker proving the job
    # exists, the outdated snippets ("old") and the replacement ("new").
    migrations: dict[str, list[dict[str, Any]]] = {}

    protolint_rule = {
        "job": "protolint",
        "required_for": "api repos",
        "job_marker": " protolint:\n",
        # Both previous runner values are accepted as migration sources.
        "old": [
            (
                " protolint:\n"
                " name: Check proto files with protolint\n"
                " runs-on: ubuntu-slim"
            ),
            (
                " protolint:\n"
                " name: Check proto files with protolint\n"
                " runs-on: ubuntu-latest"
            ),
        ],
        "new": protolint_new,
    }
    project_type = read_cookiecutter_str_var("type")
    if project_type is None:
        manual_step(
            "Unable to detect the cookiecutter project type from "
            ".cookiecutter-replay.json; cannot determine whether the protolint "
            "runner migration applies."
        )
    elif project_type == "api":
        # protolint only exists in api repos; it appears in both CI workflows.
        migrations.setdefault("ci-pr.yaml", []).append(protolint_rule)
        migrations.setdefault("ci.yaml", []).append(protolint_rule)
    else:
        print(" Skipping protolint runner migration (not an api project)")

    github_org = read_cookiecutter_str_var("github_org")
    if github_org is None:
        manual_step(
            "Unable to detect the cookiecutter GitHub organization from "
            ".cookiecutter-replay.json; cannot determine whether the "
            "publish-to-pypi runner migration applies."
        )
    elif github_org == "frequenz-floss":
        # Only frequenz-floss repos publish to PyPI from CI.
        migrations.setdefault("ci.yaml", []).append(
            {
                "job": "publish-to-pypi",
                "required_for": "frequenz-floss repos",
                "job_marker": " publish-to-pypi:\n",
                "old": [
                    (' needs: ["create-github-release"]\n runs-on: ubuntu-slim'),
                    (
                        ' needs: ["create-github-release"]\n'
                        " runs-on: ubuntu-latest"
                    ),
                ],
                "new": publish_to_pypi_new,
            }
        )
    else:
        print(" Skipping publish-to-pypi runner migration (not a frequenz-floss repo)")

    # Apply every collected rule; anything that can't be matched mechanically
    # is downgraded to a manual step rather than silently skipped.
    for filename, rules in migrations.items():
        filepath = workflows_dir / filename
        if not filepath.exists():
            for rule in rules:
                manual_step(
                    f" Expected to find {filepath} for job {rule['job']} in "
                    f"{rule['required_for']}. Please add or update that job to use "
                    "`runs-on: ubuntu-24.04`."
                )
            continue

        for rule in rules:
            job = rule["job"]
            required_for = rule["required_for"]
            job_marker = rule["job_marker"]
            new = rule["new"]
            # Re-read per rule: an earlier rule may have rewritten the file.
            content = filepath.read_text(encoding="utf-8")

            if job_marker not in content:
                manual_step(
                    f" Expected to find job {job} in {filepath} for "
                    f"{required_for}. Please update it to use "
                    "`runs-on: ubuntu-24.04`."
                )
                continue

            if new in content:
                # Already migrated (idempotency guard for re-runs).
                print(f" Skipped {filepath}: runner already up to date for job {job}")
                continue

            for old in rule["old"]:
                if old in content:
                    replace_file_contents_atomically(
                        filepath, old, new, content=content
                    )
                    print(f" Updated {filepath}: migrated runner for job {job}")
                    break
            else:
                # for-else: none of the known outdated snippets matched.
                manual_step(
                    f" Pattern not found in {filepath}: please switch the runner "
                    f"for job {job} to `runs-on: ubuntu-24.04`."
                )


def migrate_repo_config_migration_merge_group_trigger() -> None:
    """Trigger repo-config migration in the merge queue.

    Adds a ``merge_group`` trigger to the repo-config migration workflow and
    restricts the job to ``pull_request_target`` events, so the workflow can
    be used as a required merge-queue check (present but skipped in the
    queue). Falls back to a manual step when the file or the expected
    patterns cannot be found.
    """
    filepath = Path(".github/workflows/repo-config-migration.yaml")
    if not filepath.exists():
        manual_step(
            "Unable to find .github/workflows/repo-config-migration.yaml; if this "
            "project uses the repo-config migration workflow, update it to trigger "
            "on `merge_group` and skip the job unless the event is "
            "`pull_request_target`."
        )
        return

    content = filepath.read_text(encoding="utf-8")
    # Literal before/after snippets for the `on:` trigger block...
    old_on = (
        "on:\n"
        " pull_request_target:\n"
        " types: [opened, synchronize, reopened, labeled, unlabeled]\n"
    )
    new_on = (
        "on:\n"
        " merge_group: # To allow using this as a required check for merging\n"
        " pull_request_target:\n"
        " types: [opened, synchronize, reopened, labeled, unlabeled]\n"
    )
    # ...and for the job-level `if:` condition.
    old_if = (
        " if: contains(github.event.pull_request.title, 'the repo-config group')"
    )
    new_if = (
        " # Skip if it was triggered by the merge queue. We only need the workflow to\n"
        ' # be executed to meet the "Required check" condition for merging, but we\n'
        " # don't need to actually run the job, having the job present as Skipped is\n"
        " # enough.\n"
        " if: |\n"
        " github.event_name == 'pull_request_target' &&\n"
        " contains(github.event.pull_request.title, 'the repo-config group')"
    )

    # Apply whichever of the two replacements still match; each is applied at
    # most once so re-runs don't duplicate the new snippets.
    updated = content
    if old_on in updated:
        updated = updated.replace(old_on, new_on, 1)

    if old_if in updated:
        updated = updated.replace(old_if, new_if, 1)

    if updated != content:
        replace_file_atomically(filepath, updated)
        print(
            " Updated .github/workflows/repo-config-migration.yaml: added "
            "merge_group trigger"
        )
        return

    # Nothing replaced: either the file is already migrated, or it diverged
    # from the template and needs human attention.
    if new_on in content and new_if in content:
        print(
            " Skipped .github/workflows/repo-config-migration.yaml: merge queue "
            "trigger already configured"
        )
        return

    manual_step(
        "Could not find the expected repo-config migration workflow pattern in "
        ".github/workflows/repo-config-migration.yaml. If this repository uses "
        "that workflow, add the `merge_group` trigger and make the job run only "
        "for `pull_request_target` events according to the latest template."
    )


def migrate_protect_version_branches_ruleset() -> None:
    """Update the 'Protect version branches' GitHub ruleset.

    Checks, via the GitHub API (``gh`` CLI), whether the repository's
    'Protect version branches' ruleset matches the current template. The
    recent template change this covers is:

    * Removing the ``copilot_code_review`` rule.

    When the ruleset is already aligned, an informational message is
    printed. When it needs updating, the change is pushed through the API
    without dropping any existing required status checks. When the ruleset
    cannot be fetched at all, a manual-step message pointing at the docs is
    emitted instead.
    """
    rule_name = "Protect version branches"
    docs_url = (
        "https://frequenz-floss.github.io/frequenz-repo-config-python/"
        "user-guide/start-a-new-project/configure-github/#rulesets"
    )

    # Prefer a direct link to this repo's ruleset settings for manual-step
    # messages; fall back to the generic docs when unavailable.
    ruleset_url = get_ruleset_settings_url() or docs_url

    ruleset = get_ruleset(rule_name)
    if ruleset is None:
        manual_step(
            f"The '{rule_name}' GitHub ruleset was not found (or the gh CLI "
            "is not available / the API call failed). "
            "Please check whether it should exist for this repository. "
            f"If it should, import it following the instructions at: {docs_url}"
        )
        return

    # Filter out every copilot_code_review rule; record one change entry per
    # removed rule so the summary reflects exactly what was dropped.
    existing_rules = ruleset.get("rules", [])
    updated_rules = [
        rule for rule in existing_rules
        if rule.get("type") != "copilot_code_review"
    ]
    changes = (
        ["remove copilot_code_review"]
        * (len(existing_rules) - len(updated_rules))
    )

    if not changes:
        print(f" Ruleset '{rule_name}' is already up to date")
        return

    # Push the trimmed rule list back through the API.
    ruleset["rules"] = updated_rules
    if update_ruleset(ruleset["id"], ruleset):
        print(f" Updated ruleset '{rule_name}': " + ", ".join(changes))
        return

    manual_step(
        f"Failed to update the '{rule_name}' ruleset via the GitHub API. "
        f"Please apply the following changes manually at {ruleset_url}: "
        + "; ".join(changes)
    )


def apply_patch(patch_content: str) -> None:
    """Apply a patch using the patch utility.

    Args:
        patch_content: Unified-diff text; it is encoded and fed to
            ``patch -p1`` on stdin, so paths are resolved relative to the
            current working directory with one leading component stripped.

    Raises:
        subprocess.CalledProcessError: If ``patch`` exits non-zero
            (``check=True``).
    """
    subprocess.run(["patch", "-p1"], input=patch_content.encode(), check=True)
Expand Down
Loading