Skip to content

Commit

Permalink
Test
Browse files Browse the repository at this point in the history
  • Loading branch information
lucasssvaz committed Feb 6, 2024
1 parent 00c77b1 commit 5e1414c
Show file tree
Hide file tree
Showing 2 changed files with 77 additions and 3 deletions.
69 changes: 69 additions & 0 deletions .github/workflows/fetchPRcommits.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
#!/bin/sh
# SPDX-License-Identifier: BSD-3-Clause

# Abort on the first failing command (curl, jq or git below).
set -e
# This script fetches Pull Request commits missing from a shallow clone
# and creates a PR_SHAs.txt file. This script has a limit of 500 commits but the
# github API used has a lower limit of 250 commits.

# It does not rely on git merge-bases which basically don't work with
# shallow clones:
# https://github.com/thesofproject/linux/issues/2556

# Design goals:
#
# - Keep the code short and as simple as possible. No one is interested
# in maintaining this sort of script.
#
# - Fast and accurate for small Pull Requests
#
# - For large Pull Requests _with merges_ the only objective is to
# complete in a reasonable time; say less than 10 minutes. It's very
# unlikely anyone will look at 250 checkpatch results and time optimizations
# should not make this script more complex.


# Sample usage:
# $0 thesofproject/linux 2772
# Fetches the commits of a pull request and deepens the shallow clone
# enough to cover them.
# Arguments: $1 - GitHub "owner/repo" slug, $2 - PR number
# Outputs:   PR_SHAs.txt (one SHA1 per line), commits_N.json work files
main()
{
	local gh_project="$1"
	local pr_number="$2"

	printf '%s: fetching PR %d for project %s\n' "$0" "$pr_number" "$gh_project"

	# As of March 2021, Github's documented limit is 250 commits.
	# Let's have another cap at 500 (5 pages x 100).
	# https://docs.github.com/en/rest/reference/pulls#list-commits-on-a-pull-request
	local i pagelen PRlen=0
	for i in 1 2 3 4 5; do
		# -f: fail on HTTP errors (rate limit, 404, ...). Without it
		# GitHub's JSON *error object* would be saved and its key
		# count mistaken for a commit count by 'jq length' below.
		# -sS: no progress bar, but still print real errors.
		curl -fsS -H 'Accept: application/vnd.github.v3+json' \
		    "https://api.github.com/repos/$gh_project/pulls/$pr_number/commits?per_page=100&page=$i" \
		    > commits_"$i".json
		pagelen=$(jq length < commits_"$i".json)
		if [ "$pagelen" -eq 0 ]; then
			break
		fi
		PRlen=$((PRlen + pagelen))
	done

	printf 'Found %d commits, SHA1 list is in PR_SHAs.txt\n' "$PRlen"

	# -r prints raw strings: no surrounding double quotes, so no
	# 'cut' post-processing needed. jq concatenates multiple inputs.
	jq -r '.[] | .sha' commits_?.json > PR_SHAs.txt

	# PRlen+1 gets us the merge base for simple, linear histories. For
	# pull requests with merges, depth=PRLEN goes already much further
	# than needed and +1 makes little difference. It's not clear when
	# and for what sort of PRs git fetching individual commits would be
	# faster so keep a single and simple fetch for now.

	set -x # this command may take a while so show it
	git fetch --depth "$((PRlen+1))" "https://github.com/$gh_project" "pull/$pr_number/head"

}

main "$@"
11 changes: 8 additions & 3 deletions .github/workflows/pre-commit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,21 +10,26 @@ jobs:
# contains(github.event.pull_request.labels.*.name, 'Status: Pending Merge')
name: Checking if any fixes are needed
runs-on: ubuntu-latest
env:
PR_NUM: ${{github.event.number}}
steps:
- name: Checkout latest commit
uses: actions/checkout@v4
with:
fetch-depth: 0
fetch-depth: 2
- name: Set up Python 3.8
uses: actions/setup-python@v5
with:
python-version: 3.8
- name: Get changed files
id: changed-files
uses: tj-actions/[email protected]
run: |
.github/workflows/fetchPRcommits.sh ${GITHUB_REPOSITORY} "$PR_NUM"
git --no-pager log --oneline --graph --decorate --max-count=50
- name: Install and run pre-commit in changed files
uses: pre-commit/[email protected]
with:
extra_args: --files ${{ steps.changed-files.outputs.all_changed_files }}
- uses: pre-commit-ci/[email protected]
- name: Push changes using pre-commit-ci-lite
uses: pre-commit-ci/[email protected]
if: always()

0 comments on commit 5e1414c

Please sign in to comment.