diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..b1eff40a --- /dev/null +++ b/.editorconfig @@ -0,0 +1,17 @@ +# top-most EditorConfig file +root = true + +# basic rules for all files +[*] +end_of_line = lf +insert_final_newline = true +charset = utf-8 +trim_trailing_whitespace = true + +[*.{py,js}] +indent_style = space +indent_size = 4 + +[*.html] +indent_style = space +indent_size = 1 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..6fa14369 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,29 @@ +name: CI +on: + push: + branches: ["master"] + pull_request: + +jobs: + format: + runs-on: ubuntu-24.04 + name: "Linting and formatting" + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run ruff check + uses: astral-sh/ruff-action@v2 + + - name: Run ruff format --check + uses: astral-sh/ruff-action@v2 + with: + args: "format --check" + + - name: Setup Biome + uses: biomejs/setup-biome@v2 + with: + version: latest + + - name: Run Biome + run: biome ci . 
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml new file mode 100644 index 00000000..ca2edde2 --- /dev/null +++ b/.github/workflows/deploy.yaml @@ -0,0 +1,16 @@ +name: Deploy + +on: + push: + branches: + - main + - prod + +jobs: + deployment: + runs-on: ubuntu-latest + environment: ${{ github.ref_name }} + steps: + - name: Trigger deploy + run: | + curl -fsS -X POST ${{ secrets.HOOKURL }} -H "X-Key: ${{ secrets.HOOKSECRET }}" diff --git a/.github/workflows/transferdb.yaml b/.github/workflows/transferdb.yaml new file mode 100644 index 00000000..b2884e96 --- /dev/null +++ b/.github/workflows/transferdb.yaml @@ -0,0 +1,13 @@ +name: TransferDB + +# Manually triggered only +on: workflow_dispatch + +jobs: + transferdb: + runs-on: ubuntu-latest + environment: admin + steps: + - name: Trigger db transfer + run: | + curl -fsS -X POST ${{ secrets.TRANSFER_URL }} -H "X-Key: ${{ secrets.TRANSFER_KEY }}" diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..9b6ed017 --- /dev/null +++ b/Makefile @@ -0,0 +1,11 @@ +format: + ruff format + npx @biomejs/biome format --write + +lint: + ruff check + npx @biomejs/biome check + +lint-fix: + ruff check --fix + npx @biomejs/biome check --fix diff --git a/README.md b/README.md index a379f53d..a92e865f 100644 --- a/README.md +++ b/README.md @@ -6,12 +6,14 @@ A commitfest is a collection of patches and reviews for a project and is part of ## The Application -This is a Django 3.2 application backed by PostgreSQL and running on Python 3.x. +This is a Django 4.2 application backed by PostgreSQL and running on Python 3.x. ## Getting Started ### Ubuntu instructions +#### Install Dependencies / Configure Environment + First, prepare your development environment by installing pip, virtualenv, and postgresql-server-dev-X.Y. ```bash @@ -45,12 +47,24 @@ be provided. 
./manage.py migrate ``` -You'll need either a database dump of the actual server's data or else to create a superuser: +#### Load data +For a quick start, you can load some dummy data into the database. Here's how you do that: + +``` +./manage.py loaddata auth_data.json +./manage.py loaddata commitfest_data.json +``` + +If you do this, the admin username and password are `admin` and `admin`. + +On the other hand, if you'd like to start from scratch instead, you can run the following command to create +a super user: ```bash ./manage.py createsuperuser ``` +#### Start application Finally, you're ready to start the application: ```bash @@ -62,10 +76,41 @@ admin interface, go back to the main interface. You're now logged in. ## Contributing -Before committing make sure to install the git pre-commit hook to adhere to the -codestyle. +Code formatting and linting is done using [`ruff`] and [`biome`]. You can run +formatting using `make format`. Linting can be done using `make lint` and +automatic fixing of linting errors can be done using `make lint-fix`. CI checks +that you adhere to these coding standards. + +You can install the git pre-commit hook to help you adhere to the codestyle: ```bash ln -s ../../tools/githook/pre-commit .git/hooks/ +``` + +[`ruff`]: https://docs.astral.sh/ruff/ +[`biome`]: https://biomejs.dev/ +### Discord + +If you want to discuss development of a fix/feature over chat. Please join the +`#commitfest-dev` channel on the ["PostgreSQL Hacking" Discord server][1] + +[1]: https://discord.gg/XZy2DXj7Wz + +### Staging server + +The staging server is available at: +User and password for the HTTP authentication popup are both `pgtest`. The +`main` branch is automatically deployed to the staging server. After some time +on the staging server, commits will be merged into the `prod` branch, which +automatically deploys to the production server. 
+ +### Regenerating the database dump files + +If you'd like to regenerate the database dump files, you can run the following commands: ``` +./manage.py dumpdata auth --format=json --indent=4 --exclude=auth.permission > pgcommitfest/commitfest/fixtures/auth_data.json +./manage.py dumpdata commitfest --format=json --indent=4 > pgcommitfest/commitfest/fixtures/commitfest_data.json +``` + +If you want to reload data from dump file, you can run `drop owned by postgres;` in the `pgcommitfest` database first. diff --git a/biome.json b/biome.json new file mode 100644 index 00000000..1c3d6648 --- /dev/null +++ b/biome.json @@ -0,0 +1,36 @@ +{ + "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", + "vcs": { + "enabled": false, + "clientKind": "git", + "useIgnoreFile": false + }, + "files": { + "ignoreUnknown": true, + "ignore": [], + "include": [ + "media/commitfest/js/commitfest.js", + "media/commitfest/css/commitfest.css", + "biome.json" + ] + }, + "formatter": { + "enabled": true, + "indentStyle": "space", + "indentWidth": 4 + }, + "organizeImports": { + "enabled": true + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true + } + }, + "javascript": { + "formatter": { + "quoteStyle": "double" + } + } +} diff --git a/dev_requirements.txt b/dev_requirements.txt index cedd81ce..66d7ae13 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,3 +1,4 @@ -r requirements.txt uwsgi pycodestyle +ruff diff --git a/media/commitfest/css/commitfest.css b/media/commitfest/css/commitfest.css index 07bfa102..fe6f6b80 100644 --- a/media/commitfest/css/commitfest.css +++ b/media/commitfest/css/commitfest.css @@ -4,47 +4,45 @@ /* For close button with float disabled */ .close-nofloat { - float: none !important; + float: none !important; } /* General form styling */ .form-horizontal div.form-group { - margin-bottom: 10px; + margin-bottom: 10px; } div.form-group div.controls ul { - list-style-type: none; - margin: 0px; - padding: 0px; + list-style-type: 
none; + margin: 0px; + padding: 0px; } div.form-group div.controls ul li { - display: inline; + display: inline; } div.form-group div.controls ul li label { - display: inline; - font-weight: normal; - vertical-align:middle; + display: inline; + font-weight: normal; + vertical-align: middle; } div.form-group div.controls ul li label input { - display: inline; - vertical-align:middle; + display: inline; + vertical-align: middle; } -div.form-group div.controls input[type='checkbox'] { - width: 10px; +div.form-group div.controls input[type="checkbox"] { + width: 10px; } div.form-group div.controls input.threadpick-input { - width: 80%; - display: inline; + width: 80%; + display: inline; } - - /* * Attach thread dialog */ #attachThreadListWrap.loading { display: block; - background: url('/media/commitfest/spinner.gif') no-repeat center; + background: url("/media/commitfest/spinner.gif") no-repeat center; width: 124px; height: 124px; margin: 0 auto; @@ -57,7 +55,7 @@ div.form-group div.controls input.threadpick-input { * Annotate message dialog */ #annotateMessageBody.loading { display: block; - background: url('/media/commitfest/spinner.gif') no-repeat center; + background: url("/media/commitfest/spinner.gif") no-repeat center; width: 124px; height: 124px; margin: 0 auto; @@ -65,3 +63,21 @@ div.form-group div.controls input.threadpick-input { #annotateMessageBody.loading * { display: none; } + +.cfbot-summary img { + margin-top: -3px; +} + +.github-logo { + height: 20px; +} + +.additions { + font-weight: bold; + color: green; +} + +.deletions { + font-weight: bold; + color: red; +} diff --git a/media/commitfest/github-mark.svg b/media/commitfest/github-mark.svg new file mode 100644 index 00000000..37fa923d --- /dev/null +++ b/media/commitfest/github-mark.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/media/commitfest/js/commitfest.js b/media/commitfest/js/commitfest.js index 2f1eddae..99b3f021 100644 --- a/media/commitfest/js/commitfest.js +++ 
b/media/commitfest/js/commitfest.js @@ -1,308 +1,382 @@ function verify_reject() { - return confirm('Are you sure you want to close this patch as Rejected?\n\nThis should only be done when a patch will never be applied - if more work is needed, it should instead be set to "Returned with Feedback" or "Moved to next CF".\n\nSo - are you sure?'); + return confirm( + 'Are you sure you want to close this patch as Rejected?\n\nThis should only be done when a patch will never be applied - if more work is needed, it should instead be set to "Returned with Feedback" or "Moved to next CF".\n\nSo - are you sure?', + ); } function verify_withdrawn() { - return confirm('Are you sure you want to close this patch as Withdrawn?\n\nThis should only be done when the author voluntarily withdraws the patch.\n\nSo - are you sure?'); + return confirm( + "Are you sure you want to close this patch as Withdrawn?\n\nThis should only be done when the author voluntarily withdraws the patch.\n\nSo - are you sure?", + ); } function verify_returned() { - return confirm('Are you sure you want to close this patch as Returned with Feedback?\n\nThis should be done if the patch is expected to be finished at some future time, but not necessarily in the next commitfest. If work is undergoing and expected in the next commitfest, it should instead be set to "Moved to next CF".\n\nSo - are you sure?'); + return confirm( + 'Are you sure you want to close this patch as Returned with Feedback?\n\nThis should be done if the patch is expected to be finished at some future time, but not necessarily in the next commitfest. If work is undergoing and expected in the next commitfest, it should instead be set to "Moved to next CF".\n\nSo - are you sure?', + ); } function verify_next() { - return confirm('Are you sure you want to move this patch to the next commitfest?\n\nThis means the patch will be marked as closed in this commitfest, but will automatically be moved to the next one. 
If no further work is expected on this patch, it should be closed with "Rejected" or "Returned with Feedback" instead.\n\nSo - are you sure?'); + return confirm( + 'Are you sure you want to move this patch to the next commitfest?\n\nThis means the patch will be marked as closed in this commitfest, but will automatically be moved to the next one. If no further work is expected on this patch, it should be closed with "Rejected" or "Returned with Feedback" instead.\n\nSo - are you sure?', + ); } function findLatestThreads() { - $('#attachThreadListWrap').addClass('loading'); - $('#attachThreadSearchButton').addClass('disabled'); - $.get('/ajax/getThreads/', { - 's': $('#attachThreadSearchField').val(), - 'a': $('#attachThreadAttachOnly').val(), - }).success(function(data) { - sel = $('#attachThreadList'); - sel.find('option').remove(); - $.each(data, function(m,i) { - sel.append($(''); - $.each(data, function(i,m) { - sel.append(''); - }); - }).always(function() { - $('#annotateMessageBody').removeClass('loading'); - }); + $("#annotateMessageBody").addClass("loading"); + $("#doAnnotateMessageButton").addClass("disabled"); + $.get("/ajax/getMessages", { + t: threadid, + }) + .success((data) => { + sel = $("#annotateMessageList"); + sel.find("option").remove(); + sel.append(''); + $.each(data, (i, m) => { + sel.append( + ``, + ); + }); + }) + .always(() => { + $("#annotateMessageBody").removeClass("loading"); + }); } function addAnnotation(threadid) { - $('#annotateThreadList').find('option').remove(); - $('#annotateMessage').val(''); - $('#annotateMsgId').val(''); - $('#annotateModal').modal(); - $('#annotateThreadList').focus(); + $("#annotateThreadList").find("option").remove(); + $("#annotateMessage").val(""); + $("#annotateMsgId").val(""); + $("#annotateModal").modal(); + $("#annotateThreadList").focus(); updateAnnotationMessages(threadid); - $('#doAnnotateMessageButton').unbind('click'); - $('#doAnnotateMessageButton').click(function() { - var msg = 
$('#annotateMessage').val(); - if (msg.length >= 500) { - alert('Maximum length for an annotation is 500 characters.\nYou should probably post an actual message in the thread!'); - return; - } - $('#doAnnotateMessageButton').addClass('disabled'); - $('#annotateMessageBody').addClass('loading'); - $.post('/ajax/annotateMessage/', { - 't': threadid, - 'msgid': $.trim($('#annotateMsgId').val()), - 'msg': msg - }).success(function(data) { - if (data != 'OK') { - alert(data); - $('#annotateMessageBody').removeClass('loading'); - } - else { - $('#annotateModal').modal('hide'); - location.reload(); - } - }).fail(function(data) { - alert('Failed to annotate message'); - $('#annotateMessageBody').removeClass('loading'); - }); + $("#doAnnotateMessageButton").unbind("click"); + $("#doAnnotateMessageButton").click(() => { + const msg = $("#annotateMessage").val(); + if (msg.length >= 500) { + alert( + "Maximum length for an annotation is 500 characters.\nYou should probably post an actual message in the thread!", + ); + return; + } + $("#doAnnotateMessageButton").addClass("disabled"); + $("#annotateMessageBody").addClass("loading"); + $.post("/ajax/annotateMessage/", { + t: threadid, + msgid: $.trim($("#annotateMsgId").val()), + msg: msg, + }) + .success((data) => { + if (data !== "OK") { + alert(data); + $("#annotateMessageBody").removeClass("loading"); + } else { + $("#annotateModal").modal("hide"); + location.reload(); + } + }) + .fail((data) => { + alert("Failed to annotate message"); + $("#annotateMessageBody").removeClass("loading"); + }); }); } function annotateMsgPicked() { - var val = $('#annotateMessageList').val(); + const val = $("#annotateMessageList").val(); if (val) { - $('#annotateMsgId').val(val); - annotateChanged(); + $("#annotateMsgId").val(val); + annotateChanged(); } } function annotateChanged() { /* Enable/disable the annotate button */ - if ($('#annotateMessage').val() != '' && $('#annotateMsgId').val()) { - 
$('#doAnnotateMessageButton').removeClass('disabled'); - } - else { - $('#doAnnotateMessageButton').addClass('disabled'); + if ($("#annotateMessage").val() !== "" && $("#annotateMsgId").val()) { + $("#doAnnotateMessageButton").removeClass("disabled"); + } else { + $("#doAnnotateMessageButton").addClass("disabled"); } } function deleteAnnotation(annid) { - if (confirm('Are you sure you want to delete this annotation?')) { - $.post('/ajax/deleteAnnotation/', { - 'id': annid, - }).success(function(data) { - location.reload(); - }).fail(function(data) { - alert('Failed to delete annotation!'); - }); + if (confirm("Are you sure you want to delete this annotation?")) { + $.post("/ajax/deleteAnnotation/", { + id: annid, + }) + .success((data) => { + location.reload(); + }) + .fail((data) => { + alert("Failed to delete annotation!"); + }); } } function flagCommitted(committer) { - $('#commitModal').modal(); - $('#committerSelect').val(committer); - $('#doCommitButton').unbind('click'); - $('#doCommitButton').click(function() { - var c = $('#committerSelect').val(); - if (!c) { - alert('You need to select a committer before you can mark a patch as committed!'); - return; - } - document.location.href='close/committed/?c=' + c; - }); - return false; + $("#commitModal").modal(); + $("#committerSelect").val(committer); + $("#doCommitButton").unbind("click"); + $("#doCommitButton").click(() => { + const c = $("#committerSelect").val(); + if (!c) { + alert( + "You need to select a committer before you can mark a patch as committed!", + ); + return; + } + document.location.href = `close/committed/?c=${c}`; + }); + return false; } - function sortpatches(sortby) { - $('#id_sortkey').val(sortby); + let sortkey = $('#id_sortkey').val() + if (sortkey == sortby) { + $('#id_sortkey').val(-sortby) + } else if(-sortkey == sortby){ + $('#id_sortkey').val(0) + } else { + $('#id_sortkey').val(sortby); + } $('#filterform').submit(); - return false; + return false; } function 
toggleButtonCollapse(buttonId, collapseId) { - $('#' + buttonId).button('toggle'); - $('#' + collapseId).toggleClass('in') + $(`#${buttonId}`).button("toggle"); + $(`#${collapseId}`).toggleClass("in"); } function togglePatchFilterButton(buttonId, collapseId) { - /* Figure out if we are collapsing it */ - if ($('#' + collapseId).hasClass('in')) { - /* Go back to ourselves without a querystring to reset the form, unless it's already empty */ - if (document.location.href.indexOf('?') > -1) { - document.location.href = '.'; - return; - } - } + /* Figure out if we are collapsing it */ + if ($(`#${collapseId}`).hasClass("in")) { + /* Go back to ourselves without a querystring to reset the form, unless it's already empty */ + if (document.location.href.indexOf("?") > -1) { + document.location.href = "."; + return; + } + } - toggleButtonCollapse(buttonId, collapseId); + toggleButtonCollapse(buttonId, collapseId); } - /* * Upstream user search dialog */ function search_and_store_user() { - $('#doSelectUserButton').unbind('click'); - $('#doSelectUserButton').click(function() { - if (!$('#searchUserList').val()) { return false; } + $("#doSelectUserButton").unbind("click"); + $("#doSelectUserButton").click(() => { + if (!$("#searchUserList").val()) { + return false; + } - /* Create this user locally */ - $.get('/ajax/importUser/', { - 'u': $('#searchUserList').val(), - }).success(function(data) { - if (data == 'OK') { - alert('User imported!'); - $('#searchUserModal').modal('hide'); - } else { - alert('Failed to import user: ' + data); - } - }).fail(function(data, statustxt) { - alert('Failed to import user: ' + statustxt); - }); + /* Create this user locally */ + $.get("/ajax/importUser/", { + u: $("#searchUserList").val(), + }) + .success((data) => { + if (data === "OK") { + alert("User imported!"); + $("#searchUserModal").modal("hide"); + } else { + alert(`Failed to import user: ${data}`); + } + }) + .fail((data, statustxt) => { + alert(`Failed to import user: 
${statustxt}`); + }); - return false; + return false; }); - $('#searchUserModal').modal(); + $("#searchUserModal").modal(); } function findUsers() { - if (!$('#searchUserSearchField').val()) { - alert('No search term specified'); - return false; + if (!$("#searchUserSearchField").val()) { + alert("No search term specified"); + return false; } - $('#searchUserListWrap').addClass('loading'); - $('#searchUserSearchButton').addClass('disabled'); - $.get('/ajax/searchUsers/', { - 's': $('#searchUserSearchField').val(), - }).success(function(data) { - sel = $('#searchUserList'); - sel.find('option').remove(); - $.each(data, function(i,u) { - sel.append(''); + $("#searchUserListWrap").addClass("loading"); + $("#searchUserSearchButton").addClass("disabled"); + $.get("/ajax/searchUsers/", { + s: $("#searchUserSearchField").val(), + }) + .success((data) => { + sel = $("#searchUserList"); + sel.find("option").remove(); + $.each(data, (i, u) => { + sel.append( + ``, + ); + }); + }) + .always(() => { + $("#searchUserListWrap").removeClass("loading"); + $("#searchUserSearchButton").removeClass("disabled"); + searchUserListChanged(); }); - }).always(function() { - $('#searchUserListWrap').removeClass('loading'); - $('#searchUserSearchButton').removeClass('disabled'); - searchUserListChanged(); - }); - return false; + return false; } function searchUserListChanged() { - if ($('#searchUserList').val()) { - $('#doSelectUserButton').removeClass('disabled'); - } - else { - $('#doSelectUserButton').addClass('disabled'); - } + if ($("#searchUserList").val()) { + $("#doSelectUserButton").removeClass("disabled"); + } else { + $("#doSelectUserButton").addClass("disabled"); + } +} + +function addGitCheckoutToClipboard(patchId) { + navigator.clipboard.writeText(`git remote add commitfest https://github.com/postgresql-cfbot/postgresql.git +git fetch commitfest cf/${patchId} +git checkout commitfest/cf/${patchId} +`); } + +/* Build our button callbacks */ +$(document).ready(() => { + 
$("button.attachThreadButton").each((i, o) => { + const b = $(o); + b.click(() => { + $("#attachThreadAttachOnly").val("1"); + browseThreads((msgid, subject) => { + b.prev().val(msgid); + const description_field = $("#id_name"); + if (description_field.val() === "") { + description_field.val(subject); + } + return true; + }); + return false; + }); + }); +}); diff --git a/media/commitfest/needs_rebase_success.svg b/media/commitfest/needs_rebase_success.svg new file mode 100644 index 00000000..7f4113ff --- /dev/null +++ b/media/commitfest/needs_rebase_success.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/media/commitfest/new_failure.svg b/media/commitfest/new_failure.svg new file mode 100644 index 00000000..ff3012d0 --- /dev/null +++ b/media/commitfest/new_failure.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/media/commitfest/new_success.svg b/media/commitfest/new_success.svg new file mode 100644 index 00000000..a0d9b7c4 --- /dev/null +++ b/media/commitfest/new_success.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/media/commitfest/old_failure.svg b/media/commitfest/old_failure.svg new file mode 100644 index 00000000..9d91d6c0 --- /dev/null +++ b/media/commitfest/old_failure.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/media/commitfest/old_success.svg b/media/commitfest/old_success.svg new file mode 100644 index 00000000..2de4117e --- /dev/null +++ b/media/commitfest/old_success.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/media/commitfest/running.svg b/media/commitfest/running.svg new file mode 100644 index 00000000..a137d410 --- /dev/null +++ b/media/commitfest/running.svg @@ -0,0 +1,4 @@ + + + + diff --git a/media/commitfest/waiting_to_start.svg b/media/commitfest/waiting_to_start.svg new file mode 100644 index 00000000..efd371d4 --- /dev/null +++ b/media/commitfest/waiting_to_start.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git 
a/pgcommitfest/auth.py b/pgcommitfest/auth.py index 9343fc0f..af605119 100644 --- a/pgcommitfest/auth.py +++ b/pgcommitfest/auth.py @@ -24,27 +24,27 @@ # directory that's processed before the default django.contrib.admin) # -from django.http import HttpResponse, HttpResponseRedirect -from django.views.decorators.csrf import csrf_exempt -from django.contrib.auth.models import User -from django.contrib.auth.backends import ModelBackend +from django.conf import settings from django.contrib.auth import login as django_login from django.contrib.auth import logout as django_logout -from django.dispatch import Signal +from django.contrib.auth.backends import ModelBackend +from django.contrib.auth.models import User from django.db import transaction -from django.conf import settings +from django.dispatch import Signal +from django.http import HttpResponse, HttpResponseRedirect +from django.views.decorators.csrf import csrf_exempt import base64 +import hmac import json import socket -import hmac -from urllib.parse import urlencode, parse_qs +import time +from urllib.parse import parse_qs, urlencode + import requests +from Cryptodome import Random from Cryptodome.Cipher import AES from Cryptodome.Hash import SHA -from Cryptodome import Random -import time - # This signal fires when a user is created based on data from upstream. auth_user_created_from_upstream = Signal() @@ -66,24 +66,32 @@ def authenticate(self, username=None, password=None): # Two regular django views to interact with the login system #### + # Handle login requests by sending them off to the main site def login(request): - if 'next' in request.GET: + if "next" in request.GET: # Put together an url-encoded dict of parameters we're getting back, # including a small nonce at the beginning to make sure it doesn't # encrypt the same way every time. 
- s = "t=%s&%s" % (int(time.time()), urlencode({'r': request.GET['next']})) + s = "t=%s&%s" % (int(time.time()), urlencode({"r": request.GET["next"]})) # Now encrypt it r = Random.new() iv = r.read(16) - encryptor = AES.new(SHA.new(settings.SECRET_KEY.encode('ascii')).digest()[:16], AES.MODE_CBC, iv) - cipher = encryptor.encrypt(s.encode('ascii') + b' ' * (16 - (len(s) % 16))) # pad to 16 bytes - - return HttpResponseRedirect("%s?d=%s$%s" % ( - settings.PGAUTH_REDIRECT, - base64.b64encode(iv, b"-_").decode('utf8'), - base64.b64encode(cipher, b"-_").decode('utf8'), - )) + encryptor = AES.new( + SHA.new(settings.SECRET_KEY.encode("ascii")).digest()[:16], AES.MODE_CBC, iv + ) + cipher = encryptor.encrypt( + s.encode("ascii") + b" " * (16 - (len(s) % 16)) + ) # pad to 16 bytes + + return HttpResponseRedirect( + "%s?d=%s$%s" + % ( + settings.PGAUTH_REDIRECT, + base64.b64encode(iv, b"-_").decode("utf8"), + base64.b64encode(cipher, b"-_").decode("utf8"), + ) + ) else: return HttpResponseRedirect(settings.PGAUTH_REDIRECT) @@ -99,21 +107,27 @@ def logout(request): # Receive an authentication response from the main website and try # to log the user in. 
def auth_receive(request): - if 's' in request.GET and request.GET['s'] == "logout": + if "s" in request.GET and request.GET["s"] == "logout": # This was a logout request - return HttpResponseRedirect('/') + return HttpResponseRedirect("/") - if 'i' not in request.GET: + if "i" not in request.GET: return HttpResponse("Missing IV in url!", status=400) - if 'd' not in request.GET: + if "d" not in request.GET: return HttpResponse("Missing data in url!", status=400) # Set up an AES object and decrypt the data we received try: - decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY), - AES.MODE_CBC, - base64.b64decode(str(request.GET['i']), "-_")) - s = decryptor.decrypt(base64.b64decode(str(request.GET['d']), "-_")).rstrip(b' ').decode('utf8') + decryptor = AES.new( + base64.b64decode(settings.PGAUTH_KEY), + AES.MODE_CBC, + base64.b64decode(str(request.GET["i"]), "-_"), + ) + s = ( + decryptor.decrypt(base64.b64decode(str(request.GET["d"]), "-_")) + .rstrip(b" ") + .decode("utf8") + ) except UnicodeDecodeError: return HttpResponse("Badly encoded data found", 400) except Exception: @@ -126,23 +140,23 @@ def auth_receive(request): return HttpResponse("Invalid encrypted data received.", status=400) # Check the timestamp in the authentication - if (int(data['t'][0]) < time.time() - 10): + if int(data["t"][0]) < time.time() - 10: return HttpResponse("Authentication token too old.", status=400) # Update the user record (if any) try: - user = User.objects.get(username=data['u'][0]) + user = User.objects.get(username=data["u"][0]) # User found, let's see if any important fields have changed changed = [] - if user.first_name != data['f'][0]: - user.first_name = data['f'][0] - changed.append('first_name') - if user.last_name != data['l'][0]: - user.last_name = data['l'][0] - changed.append('last_name') - if user.email != data['e'][0]: - user.email = data['e'][0] - changed.append('email') + if user.first_name != data["f"][0]: + user.first_name = data["f"][0] + 
changed.append("first_name") + if user.last_name != data["l"][0]: + user.last_name = data["l"][0] + changed.append("last_name") + if user.email != data["e"][0]: + user.email = data["e"][0] + changed.append("email") if changed: user.save(update_fields=changed) except User.DoesNotExist: @@ -152,8 +166,9 @@ def auth_receive(request): # the database with a different userid. Instead of trying to # somehow fix that live, give a proper error message and # have somebody look at it manually. - if User.objects.filter(email=data['e'][0]).exists(): - return HttpResponse("""A user with email %s already exists, but with + if User.objects.filter(email=data["e"][0]).exists(): + return HttpResponse( + """A user with email %s already exists, but with a different username than %s. This is almost certainly caused by some legacy data in our database. @@ -162,26 +177,30 @@ def auth_receive(request): for you. We apologize for the inconvenience. -""" % (data['e'][0], data['u'][0]), content_type='text/plain') - - if getattr(settings, 'PGAUTH_CREATEUSER_CALLBACK', None): - res = getattr(settings, 'PGAUTH_CREATEUSER_CALLBACK')( - data['u'][0], - data['e'][0], - ['f'][0], - data['l'][0], +""" + % (data["e"][0], data["u"][0]), + content_type="text/plain", + ) + + if getattr(settings, "PGAUTH_CREATEUSER_CALLBACK", None): + res = getattr(settings, "PGAUTH_CREATEUSER_CALLBACK")( + data["u"][0], + data["e"][0], + ["f"][0], + data["l"][0], ) # If anything is returned, we'll return that as our result. # If None is returned, it means go ahead and create the user. 
if res: return res - user = User(username=data['u'][0], - first_name=data['f'][0], - last_name=data['l'][0], - email=data['e'][0], - password='setbypluginnotasha1', - ) + user = User( + username=data["u"][0], + first_name=data["f"][0], + last_name=data["l"][0], + email=data["e"][0], + password="setbypluginnotasha1", + ) user.save() auth_user_created_from_upstream.send(sender=auth_receive, user=user) @@ -193,39 +212,45 @@ def auth_receive(request): django_login(request, user) # Signal that we have information about this user - auth_user_data_received.send(sender=auth_receive, user=user, userdata={ - 'secondaryemails': data['se'][0].split(',') if 'se' in data else [] - }) + auth_user_data_received.send( + sender=auth_receive, + user=user, + userdata={"secondaryemails": data["se"][0].split(",") if "se" in data else []}, + ) # Finally, check of we have a data package that tells us where to # redirect the user. - if 'd' in data: - (ivs, datas) = data['d'][0].split('$') - decryptor = AES.new(SHA.new(settings.SECRET_KEY.encode('ascii')).digest()[:16], - AES.MODE_CBC, - base64.b64decode(ivs, b"-_")) - s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b' ').decode('utf8') + if "d" in data: + (ivs, datas) = data["d"][0].split("$") + decryptor = AES.new( + SHA.new(settings.SECRET_KEY.encode("ascii")).digest()[:16], + AES.MODE_CBC, + base64.b64decode(ivs, b"-_"), + ) + s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b" ").decode("utf8") try: rdata = parse_qs(s, strict_parsing=True) except ValueError: return HttpResponse("Invalid encrypted data received.", status=400) - if 'r' in rdata: + if "r" in rdata: # Redirect address - return HttpResponseRedirect(rdata['r'][0]) + return HttpResponseRedirect(rdata["r"][0]) # No redirect specified, see if we have it in our settings - if hasattr(settings, 'PGAUTH_REDIRECT_SUCCESS'): + if hasattr(settings, "PGAUTH_REDIRECT_SUCCESS"): return HttpResponseRedirect(settings.PGAUTH_REDIRECT_SUCCESS) - return 
HttpResponse("Authentication successful, but don't know where to redirect!", status=500) + return HttpResponse( + "Authentication successful, but don't know where to redirect!", status=500 + ) # Receive API calls from upstream, such as push changes to users @csrf_exempt def auth_api(request): - if 'X-pgauth-sig' not in request.headers: + if "X-pgauth-sig" not in request.headers: return HttpResponse("Missing signature header!", status=400) try: - sig = base64.b64decode(request.headers['X-pgauth-sig']) + sig = base64.b64decode(request.headers["X-pgauth-sig"]) except Exception: return HttpResponse("Invalid signature header!", status=400) @@ -233,7 +258,7 @@ def auth_api(request): h = hmac.digest( base64.b64decode(settings.PGAUTH_KEY), msg=request.body, - digest='sha512', + digest="sha512", ) if not hmac.compare_digest(h, sig): return HttpResponse("Invalid signature!", status=401) @@ -261,26 +286,38 @@ def _conditionally_update_record(rectype, recordkey, structkey, fieldmap, struct return None # Process the received structure - if pushstruct.get('type', None) == 'update': + if pushstruct.get("type", None) == "update": # Process updates! 
with transaction.atomic(): - for u in pushstruct.get('users', []): + for u in pushstruct.get("users", []): user = _conditionally_update_record( User, - 'username', 'username', + "username", + "username", { - 'firstname': 'first_name', - 'lastname': 'last_name', - 'email': 'email', + "firstname": "first_name", + "lastname": "last_name", + "email": "email", }, u, ) # Signal that we have information about this user (only if it exists) if user: - auth_user_data_received.send(sender=auth_api, user=user, userdata={ - k: u[k] for k in u.keys() if k not in ['firstname', 'lastname', 'email', ] - }) + auth_user_data_received.send( + sender=auth_api, + user=user, + userdata={ + k: u[k] + for k in u.keys() + if k + not in [ + "firstname", + "lastname", + "email", + ] + }, + ) return HttpResponse("OK", status=200) @@ -297,24 +334,24 @@ def user_search(searchterm=None, userid=None): # 10 seconds is already quite long. socket.setdefaulttimeout(10) if userid: - q = {'u': userid} + q = {"u": userid} else: - q = {'s': searchterm} + q = {"s": searchterm} r = requests.get( - '{0}search/'.format(settings.PGAUTH_REDIRECT), + "{0}search/".format(settings.PGAUTH_REDIRECT), params=q, ) if r.status_code != 200: return [] - (ivs, datas) = r.text.encode('utf8').split(b'&') + (ivs, datas) = r.text.encode("utf8").split(b"&") # Decryption time - decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY), - AES.MODE_CBC, - base64.b64decode(ivs, "-_")) - s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b' ').decode('utf8') + decryptor = AES.new( + base64.b64decode(settings.PGAUTH_KEY), AES.MODE_CBC, base64.b64decode(ivs, "-_") + ) + s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b" ").decode("utf8") j = json.loads(s) return j @@ -324,22 +361,24 @@ def user_search(searchterm=None, userid=None): def subscribe_to_user_changes(userid): socket.setdefaulttimeout(10) - body = json.dumps({ - 'u': userid, - }) + body = json.dumps( + { + "u": userid, + } + ) h = hmac.digest( 
base64.b64decode(settings.PGAUTH_KEY), - msg=bytes(body, 'utf-8'), - digest='sha512', + msg=bytes(body, "utf-8"), + digest="sha512", ) # Ignore the result code, just post it requests.post( - '{0}subscribe/'.format(settings.PGAUTH_REDIRECT), + "{0}subscribe/".format(settings.PGAUTH_REDIRECT), data=body, headers={ - 'X-pgauth-sig': base64.b64encode(h), + "X-pgauth-sig": base64.b64encode(h), }, ) @@ -359,15 +398,15 @@ def user_import(uid): u = u[0] - if User.objects.filter(username=u['u']).exists(): + if User.objects.filter(username=u["u"]).exists(): raise Exception("User already exists") u = User( - username=u['u'], - first_name=u['f'], - last_name=u['l'], - email=u['e'], - password='setbypluginnotsha1', + username=u["u"], + first_name=u["f"], + last_name=u["l"], + email=u["e"], + password="setbypluginnotsha1", ) u.save() diff --git a/pgcommitfest/commitfest/admin.py b/pgcommitfest/commitfest/admin.py index 0f7ffda8..8c8d62e5 100644 --- a/pgcommitfest/commitfest/admin.py +++ b/pgcommitfest/commitfest/admin.py @@ -1,10 +1,22 @@ from django.contrib import admin -from .models import * +from .models import ( + CfbotBranch, + CfbotTask, + CommitFest, + Committer, + MailThread, + MailThreadAttachment, + Patch, + PatchHistory, + PatchOnCommitFest, + TargetVersion, + Topic, +) class CommitterAdmin(admin.ModelAdmin): - list_display = ('user', 'active') + list_display = ("user", "active") class PatchOnCommitFestInline(admin.TabularInline): @@ -14,11 +26,16 @@ class PatchOnCommitFestInline(admin.TabularInline): class PatchAdmin(admin.ModelAdmin): inlines = (PatchOnCommitFestInline,) - list_display = ('name', ) + list_display = ("name",) class MailThreadAttachmentAdmin(admin.ModelAdmin): - list_display = ('date', 'author', 'messageid', 'mailthread',) + list_display = ( + "date", + "author", + "messageid", + "mailthread", + ) admin.site.register(Committer, CommitterAdmin) @@ -27,6 +44,8 @@ class MailThreadAttachmentAdmin(admin.ModelAdmin): admin.site.register(Patch, PatchAdmin) 
admin.site.register(PatchHistory) admin.site.register(TargetVersion) +admin.site.register(CfbotBranch) +admin.site.register(CfbotTask) admin.site.register(MailThread) admin.site.register(MailThreadAttachment, MailThreadAttachmentAdmin) diff --git a/pgcommitfest/commitfest/ajax.py b/pgcommitfest/commitfest/ajax.py index eaf7cdc8..329a83f9 100644 --- a/pgcommitfest/commitfest/ajax.py +++ b/pgcommitfest/commitfest/ajax.py @@ -1,18 +1,27 @@ -from django.shortcuts import get_object_or_404 -from django.http import HttpResponse, Http404 from django.conf import settings -from django.views.decorators.csrf import csrf_exempt from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.db import transaction +from django.http import Http404, HttpResponse +from django.shortcuts import get_object_or_404 +from django.views.decorators.csrf import csrf_exempt -import requests import json +import re import textwrap +import requests + from pgcommitfest.auth import user_search -from .models import CommitFest, Patch, MailThread, MailThreadAttachment -from .models import MailThreadAnnotation, PatchHistory + +from .models import ( + CommitFest, + MailThread, + MailThreadAnnotation, + MailThreadAttachment, + Patch, + PatchHistory, +) class HttpResponseServiceUnavailable(HttpResponse): @@ -23,16 +32,39 @@ class Http503(Exception): pass +def mockArchivesAPI(path): + with open(settings.MOCK_ARCHIVE_DATA, "r", encoding="utf-8") as file: + data = json.load(file) + for message in data: + message["atts"] = [] + + message_pattern = re.compile(r"^/message-id\.json/(?P[^/]+)$") + + message_match = message_pattern.match(path) + if message_match: + message_id = message_match.group("message_id") + return [message for message in data if message["msgid"] == message_id] + else: + return data + + def _archivesAPI(suburl, params=None): + if getattr(settings, "MOCK_ARCHIVES", False) and getattr( + settings, "MOCK_ARCHIVE_DATA" + ): + return 
mockArchivesAPI(suburl) + try: resp = requests.get( - "http{0}://{1}:{2}{3}".format(settings.ARCHIVES_PORT == 443 and 's' or '', - settings.ARCHIVES_SERVER, - settings.ARCHIVES_PORT, - suburl), + "http{0}://{1}:{2}{3}".format( + settings.ARCHIVES_PORT == 443 and "s" or "", + settings.ARCHIVES_SERVER, + settings.ARCHIVES_PORT, + suburl, + ), params=params, headers={ - 'Host': settings.ARCHIVES_HOST, + "Host": settings.ARCHIVES_HOST, }, timeout=settings.ARCHIVES_TIMEOUT, ) @@ -49,41 +81,43 @@ def _archivesAPI(suburl, params=None): def getThreads(request): - search = request.GET.get('s', None) - if request.GET.get('a', '0') == '1': + search = request.GET.get("s", None) + if request.GET.get("a", "0") == "1": attachonly = 1 else: attachonly = 0 # Make a JSON api call to the archives server - params = {'n': 100, 'a': attachonly} + params = {"n": 100, "a": attachonly} if search: - params['s'] = search + params["s"] = search - r = _archivesAPI('/list/pgsql-hackers/latest.json', params) - return sorted(r, key=lambda x: x['date'], reverse=True) + r = _archivesAPI("/list/pgsql-hackers/latest.json", params) + return sorted(r, key=lambda x: x["date"], reverse=True) def getMessages(request): - if 't' not in request.GET: + if "t" not in request.GET: raise Http404("Missing parameter") - threadid = request.GET['t'] + threadid = request.GET["t"] thread = MailThread.objects.get(pk=threadid) # Always make a call over to the archives api - r = _archivesAPI('/message-id.json/%s' % thread.messageid) - return sorted(r, key=lambda x: x['date'], reverse=True) + r = _archivesAPI("/message-id.json/%s" % thread.messageid) + return sorted(r, key=lambda x: x["date"], reverse=True) def refresh_single_thread(thread): - r = sorted(_archivesAPI('/message-id.json/%s' % thread.messageid), key=lambda x: x['date']) - if thread.latestmsgid != r[-1]['msgid']: + r = sorted( + _archivesAPI("/message-id.json/%s" % thread.messageid), key=lambda x: x["date"] + ) + if thread.latestmsgid != r[-1]["msgid"]: # 
There is now a newer mail in the thread! - thread.latestmsgid = r[-1]['msgid'] - thread.latestmessage = r[-1]['date'] - thread.latestauthor = r[-1]['from'] - thread.latestsubject = r[-1]['subj'] + thread.latestmsgid = r[-1]["msgid"] + thread.latestmessage = r[-1]["date"] + thread.latestauthor = r[-1]["from"] + thread.latestsubject = r[-1]["subj"] thread.save() parse_and_add_attachments(r, thread) # Potentially update the last mail date - if there wasn't already a mail on each patch @@ -95,142 +129,163 @@ def refresh_single_thread(thread): @transaction.atomic def annotateMessage(request): - thread = get_object_or_404(MailThread, pk=int(request.POST['t'])) - msgid = request.POST['msgid'] - msg = request.POST['msg'] + thread = get_object_or_404(MailThread, pk=int(request.POST["t"])) + msgid = request.POST["msgid"] + msg = request.POST["msg"] # Get the subject, author and date from the archives # We only have an API call to get the whole thread right now, so # do that, and then find our entry in it. - r = _archivesAPI('/message-id.json/%s' % thread.messageid) + r = _archivesAPI("/message-id.json/%s" % thread.messageid) for m in r: - if m['msgid'] == msgid: - annotation = MailThreadAnnotation(mailthread=thread, - user=request.user, - msgid=msgid, - annotationtext=msg, - mailsubject=m['subj'], - maildate=m['date'], - mailauthor=m['from']) + if m["msgid"] == msgid: + annotation = MailThreadAnnotation( + mailthread=thread, + user=request.user, + msgid=msgid, + annotationtext=msg, + mailsubject=m["subj"], + maildate=m["date"], + mailauthor=m["from"], + ) annotation.save() for p in thread.patches.all(): - PatchHistory(patch=p, by=request.user, what='Added annotation "%s" to %s' % (textwrap.shorten(msg, 100), msgid)).save_and_notify() + PatchHistory( + patch=p, + by=request.user, + what='Added annotation "%s" to %s' + % (textwrap.shorten(msg, 100), msgid), + ).save_and_notify() p.set_modified() p.save() - return 'OK' - return 'Message not found in thread!' 
+ return "OK" + return "Message not found in thread!" @transaction.atomic def deleteAnnotation(request): - annotation = get_object_or_404(MailThreadAnnotation, pk=request.POST['id']) + annotation = get_object_or_404(MailThreadAnnotation, pk=request.POST["id"]) for p in annotation.mailthread.patches.all(): - PatchHistory(patch=p, by=request.user, what='Deleted annotation "%s" from %s' % (annotation.annotationtext, annotation.msgid)).save_and_notify() + PatchHistory( + patch=p, + by=request.user, + what='Deleted annotation "%s" from %s' + % (annotation.annotationtext, annotation.msgid), + ).save_and_notify() p.set_modified() p.save() annotation.delete() - return 'OK' + return "OK" def parse_and_add_attachments(threadinfo, mailthread): for t in threadinfo: - if len(t['atts']): + if len(t["atts"]): # One or more attachments. For now, we're only actually going # to store and process the first one, even though the API gets # us all of them. - MailThreadAttachment.objects.get_or_create(mailthread=mailthread, - messageid=t['msgid'], - defaults={ - 'date': t['date'], - 'author': t['from'], - 'attachmentid': t['atts'][0]['id'], - 'filename': t['atts'][0]['name'], - }) + MailThreadAttachment.objects.get_or_create( + mailthread=mailthread, + messageid=t["msgid"], + defaults={ + "date": t["date"], + "author": t["from"], + "attachmentid": t["atts"][0]["id"], + "filename": t["atts"][0]["name"], + }, + ) # In theory we should remove objects if they don't have an # attachment, but how could that ever happen? Ignore for now. 
@transaction.atomic def attachThread(request): - cf = get_object_or_404(CommitFest, pk=int(request.POST['cf'])) - patch = get_object_or_404(Patch, pk=int(request.POST['p']), commitfests=cf) - msgid = request.POST['msg'] + cf = get_object_or_404(CommitFest, pk=int(request.POST["cf"])) + patch = get_object_or_404(Patch, pk=int(request.POST["p"]), commitfests=cf) + msgid = request.POST["msg"] return doAttachThread(cf, patch, msgid, request.user) def doAttachThread(cf, patch, msgid, user): # Note! Must be called in an open transaction! - r = sorted(_archivesAPI('/message-id.json/%s' % msgid), key=lambda x: x['date']) + r = sorted(_archivesAPI("/message-id.json/%s" % msgid), key=lambda x: x["date"]) # We have the full thread metadata - using the first and last entry, # construct a new mailthread in our own model. # First, though, check if it's already there. - threads = MailThread.objects.filter(messageid=r[0]['msgid']) + threads = MailThread.objects.filter(messageid=r[0]["msgid"]) if len(threads): thread = threads[0] if thread.patches.filter(id=patch.id).exists(): - return 'This thread is already added to this email' + return "This thread is already added to this email" # We did not exist, so we'd better add ourselves. # While at it, we update the thread entry with the latest data from the # archives. 
thread.patches.add(patch) - thread.latestmessage = r[-1]['date'] - thread.latestauthor = r[-1]['from'] - thread.latestsubject = r[-1]['subj'] - thread.latestmsgid = r[-1]['msgid'] + thread.latestmessage = r[-1]["date"] + thread.latestauthor = r[-1]["from"] + thread.latestsubject = r[-1]["subj"] + thread.latestmsgid = r[-1]["msgid"] thread.save() else: # No existing thread existed, so create it # Now create a new mailthread entry - m = MailThread(messageid=r[0]['msgid'], - subject=r[0]['subj'], - firstmessage=r[0]['date'], - firstauthor=r[0]['from'], - latestmessage=r[-1]['date'], - latestauthor=r[-1]['from'], - latestsubject=r[-1]['subj'], - latestmsgid=r[-1]['msgid'], - ) + m = MailThread( + messageid=r[0]["msgid"], + subject=r[0]["subj"], + firstmessage=r[0]["date"], + firstauthor=r[0]["from"], + latestmessage=r[-1]["date"], + latestauthor=r[-1]["from"], + latestsubject=r[-1]["subj"], + latestmsgid=r[-1]["msgid"], + ) m.save() m.patches.add(patch) m.save() parse_and_add_attachments(r, m) - PatchHistory(patch=patch, by=user, what='Attached mail thread %s' % r[0]['msgid']).save_and_notify() + PatchHistory( + patch=patch, by=user, what="Attached mail thread %s" % r[0]["msgid"] + ).save_and_notify() patch.update_lastmail() patch.set_modified() patch.save() - return 'OK' + return "OK" @transaction.atomic def detachThread(request): - cf = get_object_or_404(CommitFest, pk=int(request.POST['cf'])) - patch = get_object_or_404(Patch, pk=int(request.POST['p']), commitfests=cf) - thread = get_object_or_404(MailThread, messageid=request.POST['msg']) + cf = get_object_or_404(CommitFest, pk=int(request.POST["cf"])) + patch = get_object_or_404(Patch, pk=int(request.POST["p"]), commitfests=cf) + thread = get_object_or_404(MailThread, messageid=request.POST["msg"]) patch.mailthread_set.remove(thread) - PatchHistory(patch=patch, by=request.user, what='Detached mail thread %s' % request.POST['msg']).save_and_notify() + PatchHistory( + patch=patch, + by=request.user, + what="Detached 
mail thread %s" % request.POST["msg"], + ).save_and_notify() patch.update_lastmail() patch.set_modified() patch.save() - return 'OK' + return "OK" def searchUsers(request): if not request.user.is_staff: return [] - if request.GET.get('s', ''): - return user_search(request.GET['s']) + if request.GET.get("s", ""): + return user_search(request.GET["s"]) else: return [] @@ -239,35 +294,36 @@ def importUser(request): if not request.user.is_staff: raise Http404() - if request.GET.get('u', ''): - u = user_search(userid=request.GET['u']) + if request.GET.get("u", ""): + u = user_search(userid=request.GET["u"]) if len(u) != 1: return "Internal error, duplicate user found" u = u[0] - if User.objects.filter(username=u['u']).exists(): + if User.objects.filter(username=u["u"]).exists(): return "User already exists" - User(username=u['u'], - first_name=u['f'], - last_name=u['l'], - email=u['e'], - password='setbypluginnotsha1', - ).save() - return 'OK' + User( + username=u["u"], + first_name=u["f"], + last_name=u["l"], + email=u["e"], + password="setbypluginnotsha1", + ).save() + return "OK" else: raise Http404() _ajax_map = { - 'getThreads': getThreads, - 'getMessages': getMessages, - 'attachThread': attachThread, - 'detachThread': detachThread, - 'annotateMessage': annotateMessage, - 'deleteAnnotation': deleteAnnotation, - 'searchUsers': searchUsers, - 'importUser': importUser, + "getThreads": getThreads, + "getMessages": getMessages, + "attachThread": attachThread, + "detachThread": detachThread, + "annotateMessage": annotateMessage, + "deleteAnnotation": deleteAnnotation, + "searchUsers": searchUsers, + "importUser": importUser, } @@ -278,8 +334,8 @@ def main(request, command): if command not in _ajax_map: raise Http404 try: - resp = HttpResponse(content_type='application/json') + resp = HttpResponse(content_type="application/json") json.dump(_ajax_map[command](request), resp) return resp except Http503 as e: - return HttpResponseServiceUnavailable(e, 
content_type='text/plain') + return HttpResponseServiceUnavailable(e, content_type="text/plain") diff --git a/pgcommitfest/commitfest/apps.py b/pgcommitfest/commitfest/apps.py index e47efed8..7dbe4cb2 100644 --- a/pgcommitfest/commitfest/apps.py +++ b/pgcommitfest/commitfest/apps.py @@ -2,7 +2,7 @@ class CFAppConfig(AppConfig): - name = 'pgcommitfest.commitfest' + name = "pgcommitfest.commitfest" def ready(self): from pgcommitfest.auth import auth_user_data_received diff --git a/pgcommitfest/commitfest/feeds.py b/pgcommitfest/commitfest/feeds.py index aa950fb3..9aff9025 100644 --- a/pgcommitfest/commitfest/feeds.py +++ b/pgcommitfest/commitfest/feeds.py @@ -2,15 +2,17 @@ class ActivityFeed(Feed): - title = description = 'Commitfest Activity Log' - link = 'https://commitfest.postgresql.org/' + title = description = "Commitfest Activity Log" + link = "https://commitfest.postgresql.org/" def __init__(self, activity, cf, *args, **kwargs): super(ActivityFeed, self).__init__(*args, **kwargs) self.activity = activity if cf: self.cfid = cf.id - self.title = self.description = 'PostgreSQL Commitfest {0} Activity Log'.format(cf.name) + self.title = self.description = ( + "PostgreSQL Commitfest {0} Activity Log".format(cf.name) + ) else: self.cfid = None @@ -18,16 +20,22 @@ def items(self): return self.activity def item_title(self, item): - return item['name'] + return item["name"] def item_description(self, item): - return "
Patch: {name}
User: {by}
\n
{what}
".format(**item) + return ( + "
Patch: {name}
User: {by}
\n
{what}
".format( + **item + ) + ) def item_link(self, item): if self.cfid: - return 'https://commitfest.postgresql.org/{0}/{1}/'.format(self.cfid, item['patchid']) + return "https://commitfest.postgresql.org/{0}/{1}/".format( + self.cfid, item["patchid"] + ) else: - return 'https://commitfest.postgresql.org/{cfid}/{patchid}/'.format(**item) + return "https://commitfest.postgresql.org/{cfid}/{patchid}/".format(**item) def item_pubdate(self, item): - return item['date'] + return item["date"] diff --git a/pgcommitfest/commitfest/fixtures/archive_data.json b/pgcommitfest/commitfest/fixtures/archive_data.json new file mode 100644 index 00000000..680ea086 --- /dev/null +++ b/pgcommitfest/commitfest/fixtures/archive_data.json @@ -0,0 +1,602 @@ +[ + { + "msgid": "example@message-0", + "date": "2025-01-20T14:20:10", + "from": "test@test.com", + "subj": "Re: Sample rate added to pg_stat_statements" + }, + { + "msgid": "example@message-1", + "date": "2025-01-20T14:01:53", + "from": "test@test.com", + "subj": "Re: [PATCH] Add get_bytes() and set_bytes() functions" + }, + { + "msgid": "example@message-2", + "date": "2025-01-20T13:49:45", + "from": "test@test.com", + "subj": "pg_stat_statements: improve loading and saving routines for the dump\n file" + }, + { + "msgid": "example@message-3", + "date": "2025-01-20T13:26:55", + "from": "test@test.com", + "subj": "Re: per backend I/O statistics" + }, + { + "msgid": "example@message-4", + "date": "2025-01-20T12:44:40", + "from": "test@test.com", + "subj": "Re: create subscription with (origin = none, copy_data = on)" + }, + { + "msgid": "example@message-5", + "date": "2025-01-20T11:10:40", + "from": "test@test.com", + "subj": "Re: per backend I/O statistics" + }, + { + "msgid": "example@message-6", + "date": "2025-01-20T08:21:35", + "from": "test@test.com", + "subj": "Re: Statistics Import and Export" + }, + { + "msgid": "example@message-7", + "date": "2025-01-20T08:03:54", + "from": "test@test.com", + "subj": "Re: Introduce XID age and 
inactive timeout based replication slot invalidation" + }, + { + "msgid": "example@message-8", + "date": "2025-01-20T06:53:39", + "from": "test@test.com", + "subj": "RE: Conflict detection for update_deleted in logical replication" + }, + { + "msgid": "example@message-9", + "date": "2025-01-20T06:49:41", + "from": "test@test.com", + "subj": "Re: Adding a '--two-phase' option to 'pg_createsubscriber' utility." + }, + { + "msgid": "example@message-10", + "date": "2025-01-20T06:34:41", + "from": "test@test.com", + "subj": "Re: per backend I/O statistics" + }, + { + "msgid": "example@message-11", + "date": "2025-01-20T05:56:21", + "from": "test@test.com", + "subj": "Re: [PATCH] immediately kill psql process if server is not running." + }, + { + "msgid": "example@message-12", + "date": "2025-01-20T05:33:23", + "from": "test@test.com", + "subj": "Re: connection establishment versus parallel workers" + }, + { + "msgid": "example@message-13", + "date": "2025-01-20T05:32:07", + "from": "test@test.com", + "subj": "Re: Pgoutput not capturing the generated columns" + }, + { + "msgid": "example@message-14", + "date": "2025-01-20T04:10:41", + "from": "test@test.com", + "subj": "Re: Pgoutput not capturing the generated columns" + }, + { + "msgid": "example@message-15", + "date": "2025-01-20T04:01:27", + "from": "test@test.com", + "subj": "int64 support in List API" + }, + { + "msgid": "example@message-16", + "date": "2025-01-19T23:55:17", + "from": "test@test.com", + "subj": "Re: Add RESPECT/IGNORE NULLS and FROM FIRST/LAST options" + }, + { + "msgid": "example@message-17", + "date": "2025-01-19T23:47:14", + "from": "test@test.com", + "subj": "Re: attndims, typndims still not enforced, but make the value within a sane threshold" + }, + { + "msgid": "example@message-18", + "date": "2025-01-19T15:50:49", + "from": "test@test.com", + "subj": "Re: Parallel heap vacuum" + }, + { + "msgid": "example@message-19", + "date": "2025-01-19T14:56:49", + "from": "test@test.com", + "subj": "Re: 
[RFC] Lock-free XLog Reservation from WAL" + }, + { + "msgid": "example@message-20", + "date": "2025-01-19T12:16:49", + "from": "test@test.com", + "subj": "Re: Pgoutput not capturing the generated columns" + }, + { + "msgid": "example@message-21", + "date": "2025-01-19T09:33:55", + "from": "test@test.com", + "subj": "Re: Add XMLNamespaces to XMLElement" + }, + { + "msgid": "example@message-22", + "date": "2025-01-19T00:11:32", + "from": "test@test.com", + "subj": "Get rid of WALBufMappingLock" + }, + { + "msgid": "example@message-23", + "date": "2025-01-18T23:42:50", + "from": "test@test.com", + "subj": "Re: improve DEBUG1 logging of parallel workers for CREATE INDEX?" + }, + { + "msgid": "example@message-24", + "date": "2025-01-18T20:37:54", + "from": "test@test.com", + "subj": "Re: Adding comments to help understand psql hidden queries" + }, + { + "msgid": "example@message-25", + "date": "2025-01-18T19:44:00", + "from": "test@test.com", + "subj": "Re: Coccinelle for PostgreSQL development [1/N]: coccicheck.py" + }, + { + "msgid": "example@message-26", + "date": "2025-01-18T17:32:10", + "from": "test@test.com", + "subj": "Re: Replace current implementations in crypt() and gen_salt() to\n OpenSSL" + }, + { + "msgid": "example@message-27", + "date": "2025-01-18T17:00:04", + "from": "test@test.com", + "subj": "Re: Statistics Import and Export" + }, + { + "msgid": "example@message-28", + "date": "2025-01-18T16:51:08", + "from": "test@test.com", + "subj": "Re: Confine vacuum skip logic to lazy_scan_skip" + }, + { + "msgid": "example@message-29", + "date": "2025-01-18T14:18:00", + "from": "test@test.com", + "subj": "Re: Revisiting {CREATE INDEX, REINDEX} CONCURRENTLY improvements" + }, + { + "msgid": "example@message-30", + "date": "2025-01-18T12:59:35", + "from": "test@test.com", + "subj": "Re: Issues with ON CONFLICT UPDATE and REINDEX CONCURRENTLY" + }, + { + "msgid": "example@message-31", + "date": "2025-01-18T07:14:02", + "from": "test@test.com", + "subj": "Re: Old 
BufferDesc refcount in PrintBufferDescs and PrintPinnedBufs" + }, + { + "msgid": "example@message-32", + "date": "2025-01-18T06:42:15", + "from": "test@test.com", + "subj": "Re: Collation & ctype method table, and extension hooks" + }, + { + "msgid": "example@message-33", + "date": "2025-01-18T05:01:27", + "from": "test@test.com", + "subj": "Re: create subscription with (origin = none, copy_data = on)" + }, + { + "msgid": "example@message-34", + "date": "2025-01-18T03:45:13", + "from": "test@test.com", + "subj": "RE: Conflict detection for update_deleted in logical replication" + }, + { + "msgid": "example@message-35", + "date": "2025-01-18T02:02:03", + "from": "test@test.com", + "subj": "Re: Old BufferDesc refcount in PrintBufferDescs and PrintPinnedBufs" + }, + { + "msgid": "example@message-36", + "date": "2025-01-18T01:23:19", + "from": "test@test.com", + "subj": "rename es_epq_active to es_epqstate" + }, + { + "msgid": "example@message-37", + "date": "2025-01-18T01:11:41", + "from": "test@test.com", + "subj": "Re: pg_trgm comparison bug on cross-architecture replication due to\n different char implementation" + }, + { + "msgid": "example@message-38", + "date": "2025-01-18T00:34:43", + "from": "test@test.com", + "subj": "Re: Add CASEFOLD() function." + }, + { + "msgid": "example@message-39", + "date": "2025-01-18T00:27:43", + "from": "test@test.com", + "subj": "Re: [PATCH] Add roman support for to_number function" + }, + { + "msgid": "example@message-40", + "date": "2025-01-17T22:11:56", + "from": "test@test.com", + "subj": "Old BufferDesc refcount in PrintBufferDescs and PrintPinnedBufs" + }, + { + "msgid": "example@message-41", + "date": "2025-01-17T20:44:01", + "from": "test@test.com", + "subj": "Re: Bug in detaching a partition with a foreign key." 
+ }, + { + "msgid": "example@message-42", + "date": "2025-01-17T19:02:15", + "from": "test@test.com", + "subj": "Re: [PoC] Federated Authn/z with OAUTHBEARER" + }, + { + "msgid": "example@message-43", + "date": "2025-01-17T16:43:29", + "from": "test@test.com", + "subj": "Re: Add RESPECT/IGNORE NULLS and FROM FIRST/LAST options" + }, + { + "msgid": "example@message-44", + "date": "2025-01-17T16:01:53", + "from": "test@test.com", + "subj": "Re: Accept recovery conflict interrupt on blocked writing" + }, + { + "msgid": "example@message-45", + "date": "2025-01-17T15:45:46", + "from": "test@test.com", + "subj": "Re: Set AUTOCOMMIT to on in script output by pg_dump" + }, + { + "msgid": "example@message-46", + "date": "2025-01-17T15:42:13", + "from": "test@test.com", + "subj": "Re: POC: track vacuum/analyze cumulative time per relation" + }, + { + "msgid": "example@message-47", + "date": "2025-01-17T15:40:54", + "from": "test@test.com", + "subj": "Re: pure parsers and reentrant scanners" + }, + { + "msgid": "example@message-48", + "date": "2025-01-17T14:20:12", + "from": "test@test.com", + "subj": "Re: Statistics Import and Export" + }, + { + "msgid": "example@message-49", + "date": "2025-01-17T12:50:15", + "from": "test@test.com", + "subj": "Re: NOT ENFORCED constraint feature" + }, + { + "msgid": "example@message-50", + "date": "2025-01-17T12:03:09", + "from": "test@test.com", + "subj": "Re: Bypassing cursors in postgres_fdw to enable parallel plans" + }, + { + "msgid": "example@message-51", + "date": "2025-01-17T10:23:48", + "from": "test@test.com", + "subj": "Re: per backend I/O statistics" + }, + { + "msgid": "example@message-52", + "date": "2025-01-17T09:29:50", + "from": "test@test.com", + "subj": "Re: Add “FOR UPDATE NOWAIT” lock details to the log." 
+ }, + { + "msgid": "example@message-53", + "date": "2025-01-17T08:30:04", + "from": "test@test.com", + "subj": "create subscription with (origin = none, copy_data = on)" + }, + { + "msgid": "example@message-54", + "date": "2025-01-17T07:18:20", + "from": "test@test.com", + "subj": "Re: Re: proposal: schema variables" + }, + { + "msgid": "example@message-55", + "date": "2025-01-17T07:15:34", + "from": "test@test.com", + "subj": "Re: SQLFunctionCache and generic plans" + }, + { + "msgid": "example@message-56", + "date": "2025-01-17T05:05:41", + "from": "test@test.com", + "subj": "Re: Some ExecSeqScan optimizations" + }, + { + "msgid": "example@message-57", + "date": "2025-01-17T05:00:49", + "from": "test@test.com", + "subj": "Remove XLogRecGetFullXid() in xlogreader.c?" + }, + { + "msgid": "example@message-58", + "date": "2025-01-17T04:22:07", + "from": "test@test.com", + "subj": "Re: Adding a '--two-phase' option to 'pg_createsubscriber' utility." + }, + { + "msgid": "example@message-59", + "date": "2025-01-17T03:18:45", + "from": "test@test.com", + "subj": "Automatic update of time column" + }, + { + "msgid": "example@message-60", + "date": "2025-01-17T01:06:14", + "from": "test@test.com", + "subj": "Re: Parallel heap vacuum" + }, + { + "msgid": "example@message-61", + "date": "2025-01-17T01:05:53", + "from": "test@test.com", + "subj": "Timeline issue if StartupXLOG() is interrupted right before\n end-of-recovery record is done" + }, + { + "msgid": "example@message-62", + "date": "2025-01-16T22:50:14", + "from": "test@test.com", + "subj": "Re: Trigger more frequent autovacuums of heavy insert tables" + }, + { + "msgid": "example@message-63", + "date": "2025-01-16T22:41:06", + "from": "test@test.com", + "subj": "Re: Document NULL" + }, + { + "msgid": "example@message-64", + "date": "2025-01-16T21:43:49", + "from": "test@test.com", + "subj": "Re: Trigger more frequent autovacuums of heavy insert tables" + }, + { + "msgid": "example@message-65", + "date": 
"2025-01-16T20:52:54", + "from": "test@test.com", + "subj": "Re: An improvement of ProcessTwoPhaseBuffer logic" + }, + { + "msgid": "example@message-66", + "date": "2025-01-16T19:38:21", + "from": "test@test.com", + "subj": "Re: Document How Commit Handles Aborted Transactions" + }, + { + "msgid": "example@message-67", + "date": "2025-01-16T18:42:32", + "from": "test@test.com", + "subj": "Re: Non-text mode for pg_dumpall" + }, + { + "msgid": "example@message-68", + "date": "2025-01-16T15:59:31", + "from": "test@test.com", + "subj": "Re: per backend WAL statistics" + }, + { + "msgid": "example@message-69", + "date": "2025-01-16T14:14:25", + "from": "test@test.com", + "subj": "Re: [PATCH] Add sortsupport for range types and btree_gist" + }, + { + "msgid": "example@message-70", + "date": "2025-01-16T13:53:31", + "from": "test@test.com", + "subj": "Bug in detaching a partition with a foreign key." + }, + { + "msgid": "example@message-71", + "date": "2025-01-16T13:52:46", + "from": "test@test.com", + "subj": "Increase NUM_XLOGINSERT_LOCKS" + }, + { + "msgid": "example@message-72", + "date": "2025-01-16T13:32:09", + "from": "test@test.com", + "subj": "Re: POC: make mxidoff 64 bits" + }, + { + "msgid": "example@message-73", + "date": "2025-01-16T13:24:41", + "from": "test@test.com", + "subj": "Re: Accept recovery conflict interrupt on blocked writing" + }, + { + "msgid": "example@message-74", + "date": "2025-01-16T11:16:06", + "from": "test@test.com", + "subj": "Re: Adding a '--two-phase' option to 'pg_createsubscriber' utility." + }, + { + "msgid": "example@message-75", + "date": "2025-01-16T10:54:53", + "from": "test@test.com", + "subj": "Re: Change GUC hashtable to use simplehash?" 
+ }, + { + "msgid": "example@message-76", + "date": "2025-01-16T10:54:22", + "from": "test@test.com", + "subj": "Re: Psql meta-command conninfo+" + }, + { + "msgid": "example@message-77", + "date": "2025-01-16T08:47:08", + "from": "test@test.com", + "subj": "Re: Pgoutput not capturing the generated columns" + }, + { + "msgid": "example@message-78", + "date": "2025-01-16T08:44:18", + "from": "test@test.com", + "subj": "Re: Non-text mode for pg_dumpall" + }, + { + "msgid": "example@message-79", + "date": "2025-01-16T08:40:51", + "from": "test@test.com", + "subj": "Re: Show WAL write and fsync stats in pg_stat_io" + }, + { + "msgid": "example@message-80", + "date": "2025-01-16T07:50:09", + "from": "test@test.com", + "subj": "Re: An improvement of ProcessTwoPhaseBuffer logic" + }, + { + "msgid": "example@message-81", + "date": "2025-01-16T07:21:13", + "from": "test@test.com", + "subj": "Re: XMLDocument (SQL/XML X030)" + }, + { + "msgid": "example@message-82", + "date": "2025-01-16T07:05:16", + "from": "test@test.com", + "subj": "Re: Introduce XID age and inactive timeout based replication slot invalidation" + }, + { + "msgid": "example@message-83", + "date": "2025-01-16T07:04:23", + "from": "test@test.com", + "subj": "Re: Log a warning in pg_createsubscriber for max_slot_wal_keep_size" + }, + { + "msgid": "example@message-84", + "date": "2025-01-16T05:38:19", + "from": "test@test.com", + "subj": "Re: TOAST versus toast" + }, + { + "msgid": "example@message-85", + "date": "2025-01-16T05:17:39", + "from": "test@test.com", + "subj": "Re: Log a warning in pg_createsubscriber for max_slot_wal_keep_size" + }, + { + "msgid": "example@message-86", + "date": "2025-01-16T05:13:08", + "from": "test@test.com", + "subj": "Re: Make pg_stat_io view count IOs as bytes instead of blocks" + }, + { + "msgid": "example@message-87", + "date": "2025-01-16T04:14:31", + "from": "test@test.com", + "subj": "Re: Make pg_stat_io view count IOs as bytes instead of blocks" + }, + { + "msgid": 
"example@message-88", + "date": "2025-01-16T03:57:49", + "from": "test@test.com", + "subj": "TOAST versus toast" + }, + { + "msgid": "example@message-89", + "date": "2025-01-16T02:19:49", + "from": "test@test.com", + "subj": "Limit length of queryies in pg_stat_statement extension" + }, + { + "msgid": "example@message-90", + "date": "2025-01-16T01:45:15", + "from": "test@test.com", + "subj": "Re: Confine vacuum skip logic to lazy_scan_skip" + }, + { + "msgid": "example@message-91", + "date": "2025-01-16T01:15:31", + "from": "test@test.com", + "subj": "Re: Change GUC hashtable to use simplehash?" + }, + { + "msgid": "example@message-92", + "date": "2025-01-16T01:12:51", + "from": "test@test.com", + "subj": "Fix misuse use of pg_b64_encode function (contrib/postgres_fdw/connection.c)" + }, + { + "msgid": "example@message-93", + "date": "2025-01-16T01:00:51", + "from": "test@test.com", + "subj": "Re: An improvement of ProcessTwoPhaseBuffer logic" + }, + { + "msgid": "example@message-94", + "date": "2025-01-16T00:42:49", + "from": "test@test.com", + "subj": "Re: Infinite loop in XLogPageRead() on standby" + }, + { + "msgid": "example@message-95", + "date": "2025-01-15T23:47:51", + "from": "test@test.com", + "subj": "Re: convert libpgport's pqsignal() to a void function" + }, + { + "msgid": "example@message-96", + "date": "2025-01-15T22:20:58", + "from": "test@test.com", + "subj": "Re: Use Python \"Limited API\" in PL/Python" + }, + { + "msgid": "example@message-97", + "date": "2025-01-15T20:56:04", + "from": "test@test.com", + "subj": "Re: Statistics Import and Export" + }, + { + "msgid": "example@message-98", + "date": "2025-01-15T20:55:52", + "from": "test@test.com", + "subj": "Re: Eagerly scan all-visible pages to amortize aggressive vacuum" + }, + { + "msgid": "example@message-99", + "date": "2025-01-15T20:35:41", + "from": "test@test.com", + "subj": "Re: Add XMLNamespaces to XMLElement" + } +] diff --git a/pgcommitfest/commitfest/fixtures/auth_data.json 
b/pgcommitfest/commitfest/fixtures/auth_data.json new file mode 100644 index 00000000..bfaf3bfb --- /dev/null +++ b/pgcommitfest/commitfest/fixtures/auth_data.json @@ -0,0 +1,20 @@ +[ +{ + "model": "auth.user", + "pk": 1, + "fields": { + "password": "pbkdf2_sha256$600000$49rgHaLmmFQUm7c663LCrU$i68PFeI493lPmgNx/RHnWNuw4ZRzzvJWNqU4os5VnF4=", + "last_login": "2025-01-26T10:43:07.735", + "is_superuser": true, + "username": "admin", + "first_name": "", + "last_name": "", + "email": "test@test.com", + "is_staff": true, + "is_active": true, + "date_joined": "2025-01-20T15:47:04.132", + "groups": [], + "user_permissions": [] + } +} +] diff --git a/pgcommitfest/commitfest/fixtures/commitfest_data.json b/pgcommitfest/commitfest/fixtures/commitfest_data.json new file mode 100644 index 00000000..7bd54001 --- /dev/null +++ b/pgcommitfest/commitfest/fixtures/commitfest_data.json @@ -0,0 +1,619 @@ +[ +{ + "model": "commitfest.commitfest", + "pk": 1, + "fields": { + "name": "Sample Old Commitfest", + "status": 4, + "startdate": "2024-05-01", + "enddate": "2024-05-31" + } +}, +{ + "model": "commitfest.commitfest", + "pk": 2, + "fields": { + "name": "Sample In Progress Commitfest", + "status": 3, + "startdate": "2025-01-01", + "enddate": "2025-02-28" + } +}, +{ + "model": "commitfest.commitfest", + "pk": 3, + "fields": { + "name": "Sample Open Commitfest", + "status": 2, + "startdate": "2025-03-01", + "enddate": "2025-03-31" + } +}, +{ + "model": "commitfest.commitfest", + "pk": 4, + "fields": { + "name": "Sample Future Commitfest", + "status": 1, + "startdate": "2025-05-01", + "enddate": "2025-05-31" + } +}, +{ + "model": "commitfest.topic", + "pk": 1, + "fields": { + "topic": "Bugs" + } +}, +{ + "model": "commitfest.topic", + "pk": 2, + "fields": { + "topic": "Performance" + } +}, +{ + "model": "commitfest.topic", + "pk": 3, + "fields": { + "topic": "Miscellaneous" + } +}, +{ + "model": "commitfest.targetversion", + "pk": 1, + "fields": { + "version": "18" + } +}, +{ + "model": 
"commitfest.patch", + "pk": 1, + "fields": { + "name": "Conflict detection for update_deleted in logical replication", + "topic": 1, + "wikilink": "", + "gitlink": "", + "targetversion": null, + "committer": null, + "created": "2025-01-26T10:48:31.579", + "modified": "2025-01-26T10:53:20.498", + "lastmail": "2025-01-20T06:53:39", + "authors": [ + 1 + ], + "reviewers": [], + "subscribers": [], + "mailthread_set": [ + 1 + ] + } +}, +{ + "model": "commitfest.patch", + "pk": 2, + "fields": { + "name": "Sample rate added to pg_stat_statements", + "topic": 3, + "wikilink": "", + "gitlink": "", + "targetversion": null, + "committer": null, + "created": "2025-01-26T10:51:17.305", + "modified": "2025-01-26T10:51:19.631", + "lastmail": "2025-01-20T14:20:10", + "authors": [], + "reviewers": [], + "subscribers": [], + "mailthread_set": [ + 2 + ] + } +}, +{ + "model": "commitfest.patch", + "pk": 3, + "fields": { + "name": "Per Backend I/O statistics", + "topic": 3, + "wikilink": "", + "gitlink": "", + "targetversion": null, + "committer": null, + "created": "2025-01-26T11:02:07.467", + "modified": "2025-01-26T11:02:10.911", + "lastmail": "2025-01-20T13:26:55", + "authors": [], + "reviewers": [], + "subscribers": [], + "mailthread_set": [ + 3 + ] + } +}, +{ + "model": "commitfest.patch", + "pk": 4, + "fields": { + "name": "create subscription with (origin = none, copy_data = none)", + "topic": 3, + "wikilink": "", + "gitlink": "", + "targetversion": null, + "committer": null, + "created": "2025-01-31T13:30:19.744", + "modified": "2025-01-31T13:30:21.305", + "lastmail": "2025-01-20T12:44:40", + "authors": [], + "reviewers": [], + "subscribers": [], + "mailthread_set": [ + 4 + ] + } +}, +{ + "model": "commitfest.patchoncommitfest", + "pk": 1, + "fields": { + "patch": 1, + "commitfest": 2, + "enterdate": "2025-01-26T10:48:31.579", + "leavedate": null, + "status": 3 + } +}, +{ + "model": "commitfest.patchoncommitfest", + "pk": 2, + "fields": { + "patch": 2, + "commitfest": 2, + 
"enterdate": "2025-01-26T10:51:17.305", + "leavedate": null, + "status": 1 + } +}, +{ + "model": "commitfest.patchoncommitfest", + "pk": 3, + "fields": { + "patch": 1, + "commitfest": 1, + "enterdate": "2024-04-01T10:52:24", + "leavedate": "2024-06-05T10:52:34", + "status": 5 + } +}, +{ + "model": "commitfest.patchoncommitfest", + "pk": 4, + "fields": { + "patch": 3, + "commitfest": 3, + "enterdate": "2025-01-26T11:02:07.467", + "leavedate": null, + "status": 1 + } +}, +{ + "model": "commitfest.patchoncommitfest", + "pk": 5, + "fields": { + "patch": 4, + "commitfest": 2, + "enterdate": "2025-01-31T13:30:19.745", + "leavedate": null, + "status": 1 + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 1, + "fields": { + "patch": 1, + "date": "2025-01-26T10:48:31.580", + "by": 1, + "by_cfbot": false, + "what": "Created patch record" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 2, + "fields": { + "patch": 1, + "date": "2025-01-26T10:48:31.582", + "by": 1, + "by_cfbot": false, + "what": "Attached mail thread example@message-8" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 3, + "fields": { + "patch": 1, + "date": "2025-01-26T10:48:54.115", + "by": 1, + "by_cfbot": false, + "what": "Changed authors to (admin)" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 4, + "fields": { + "patch": 2, + "date": "2025-01-26T10:51:17.306", + "by": 1, + "by_cfbot": false, + "what": "Created patch record" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 5, + "fields": { + "patch": 2, + "date": "2025-01-26T10:51:17.307", + "by": 1, + "by_cfbot": false, + "what": "Attached mail thread example@message-0" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 6, + "fields": { + "patch": 1, + "date": "2025-01-26T10:53:20.498", + "by": 1, + "by_cfbot": false, + "what": "New status: Ready for Committer" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 7, + "fields": { + "patch": 3, + "date": "2025-01-26T11:02:07.468", + "by": 1, + "by_cfbot": 
false, + "what": "Created patch record" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 8, + "fields": { + "patch": 3, + "date": "2025-01-26T11:02:07.469", + "by": 1, + "by_cfbot": false, + "what": "Attached mail thread example@message-3" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 9, + "fields": { + "patch": 4, + "date": "2025-01-31T13:30:19.745", + "by": 1, + "by_cfbot": false, + "what": "Created patch record" + } +}, +{ + "model": "commitfest.patchhistory", + "pk": 10, + "fields": { + "patch": 4, + "date": "2025-01-31T13:30:19.748", + "by": 1, + "by_cfbot": false, + "what": "Attached mail thread example@message-4" + } +}, +{ + "model": "commitfest.mailthread", + "pk": 1, + "fields": { + "messageid": "example@message-8", + "subject": "RE: Conflict detection for update_deleted in logical replication", + "firstmessage": "2025-01-20T06:53:39", + "firstauthor": "test@test.com", + "latestmessage": "2025-01-20T06:53:39", + "latestauthor": "test@test.com", + "latestsubject": "RE: Conflict detection for update_deleted in logical replication", + "latestmsgid": "example@message-8" + } +}, +{ + "model": "commitfest.mailthread", + "pk": 2, + "fields": { + "messageid": "example@message-0", + "subject": "Re: Sample rate added to pg_stat_statements", + "firstmessage": "2025-01-20T14:20:10", + "firstauthor": "test@test.com", + "latestmessage": "2025-01-20T14:20:10", + "latestauthor": "test@test.com", + "latestsubject": "Re: Sample rate added to pg_stat_statements", + "latestmsgid": "example@message-0" + } +}, +{ + "model": "commitfest.mailthread", + "pk": 3, + "fields": { + "messageid": "example@message-3", + "subject": "Re: per backend I/O statistics", + "firstmessage": "2025-01-20T13:26:55", + "firstauthor": "test@test.com", + "latestmessage": "2025-01-20T13:26:55", + "latestauthor": "test@test.com", + "latestsubject": "Re: per backend I/O statistics", + "latestmsgid": "example@message-3" + } +}, +{ + "model": "commitfest.mailthread", + "pk": 4, + 
"fields": { + "messageid": "example@message-4", + "subject": "Re: create subscription with (origin = none, copy_data = on)", + "firstmessage": "2025-01-20T12:44:40", + "firstauthor": "test@test.com", + "latestmessage": "2025-01-20T12:44:40", + "latestauthor": "test@test.com", + "latestsubject": "Re: create subscription with (origin = none, copy_data = on)", + "latestmsgid": "example@message-4" + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 1, + "fields": { + "statusstring": "Needs review", + "sortkey": 10 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 2, + "fields": { + "statusstring": "Waiting on Author", + "sortkey": 15 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 3, + "fields": { + "statusstring": "Ready for Committer", + "sortkey": 20 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 4, + "fields": { + "statusstring": "Committed", + "sortkey": 25 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 5, + "fields": { + "statusstring": "Moved to next CF", + "sortkey": 30 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 6, + "fields": { + "statusstring": "Rejected", + "sortkey": 50 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 7, + "fields": { + "statusstring": "Returned with Feedback", + "sortkey": 50 + } +}, +{ + "model": "commitfest.patchstatus", + "pk": 8, + "fields": { + "statusstring": "Withdrawn", + "sortkey": 50 + } +}, +{ + "model": "commitfest.cfbotbranch", + "pk": 1, + "fields": { + "branch_id": 123, + "branch_name": "cf/1", + "commit_id": "abcdefg", + "apply_url": "http://cfbot.cputube.org/patch_4573.log", + "status": "finished", + "needs_rebase_since": null, + "created": "2025-01-26T22:06:02.980", + "modified": "2025-01-29T22:50:37.805", + "version": "", + "patch_count": 5, + "first_additions": 10, + "first_deletions": 5, + "all_additions": 834, + "all_deletions": 128 + } +}, +{ + "model": "commitfest.cfbotbranch", + "pk": 2, + "fields": { + "branch_id": 345, + "branch_name": "cf/2", + "commit_id": 
null, + "apply_url": "http://cfbot.cputube.org/patch_4573.log", + "status": "failed", + "needs_rebase_since": null, + "created": "2025-01-26T22:11:09.961", + "modified": "2025-01-26T22:20:39.372", + "version": null, + "patch_count": null, + "first_additions": null, + "first_deletions": null, + "all_additions": null, + "all_deletions": null + } +}, +{ + "model": "commitfest.cfbotbranch", + "pk": 3, + "fields": { + "branch_id": 567, + "branch_name": "cf/3", + "commit_id": "abc123", + "apply_url": "http://cfbot.cputube.org/patch_4748.log", + "status": "failed", + "needs_rebase_since": null, + "created": "2025-01-26T22:22:46.602", + "modified": "2025-01-29T22:58:51.032", + "version": "", + "patch_count": 3, + "first_additions": 345, + "first_deletions": 158, + "all_additions": 8764, + "all_deletions": 345 + } +}, +{ + "model": "commitfest.cfbotbranch", + "pk": 4, + "fields": { + "branch_id": 76, + "branch_name": "cf/4", + "commit_id": "abcggg", + "apply_url": "http://cfbot.cputube.org/patch_4748.log", + "status": "testing", + "needs_rebase_since": null, + "created": "2025-01-31T13:32:22.017", + "modified": "2025-01-31T13:32:22.017", + "version": "", + "patch_count": 1, + "first_additions": 123, + "first_deletions": 14, + "all_additions": 123, + "all_deletions": 14 + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 1, + "fields": { + "task_id": "12345", + "task_name": "Linux build", + "patch": 1, + "branch_id": 123, + "position": 1, + "status": "COMPLETED", + "created": "2025-01-26T22:06:49.237", + "modified": "2025-01-26T22:07:40.405" + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 2, + "fields": { + "task_id": "12346", + "task_name": "MacOS Build", + "patch": 1, + "branch_id": 123, + "position": 2, + "status": "COMPLETED", + "created": "2025-01-26T22:07:32.041", + "modified": "2025-01-26T22:07:32.041" + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 3, + "fields": { + "task_id": "4561", + "task_name": "Redhat", + "patch": 3, + "branch_id": 567, + 
"position": 1, + "status": "COMPLETED", + "created": "2025-01-26T22:24:37.445", + "modified": "2025-01-26T22:24:37.445" + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 4, + "fields": { + "task_id": "4562", + "task_name": "MacOS build", + "patch": 3, + "branch_id": 567, + "position": 2, + "status": "EXECUTING", + "created": "2025-01-26T22:25:15.283", + "modified": "2025-01-26T22:27:09.055" + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 5, + "fields": { + "task_id": "4563", + "task_name": "FreeBSD", + "patch": 3, + "branch_id": 567, + "position": 3, + "status": "FAILED", + "created": "2025-01-26T22:25:48.021", + "modified": "2025-01-26T22:25:48.021" + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 6, + "fields": { + "task_id": "4564", + "task_name": "NetBSD", + "patch": 3, + "branch_id": 567, + "position": 4, + "status": "CREATED", + "created": "2025-01-26T22:29:09.156", + "modified": "2025-01-26T22:29:09.156" + } +}, +{ + "model": "commitfest.cfbottask", + "pk": 7, + "fields": { + "task_id": "4565", + "task_name": "Linux Valgrind", + "patch": 3, + "branch_id": 567, + "position": 5, + "status": "SCHEDULED", + "created": "2025-01-26T22:30:03.199", + "modified": "2025-01-26T22:30:03.199" + } +} +] diff --git a/pgcommitfest/commitfest/forms.py b/pgcommitfest/commitfest/forms.py index 61d90463..1353fc2d 100644 --- a/pgcommitfest/commitfest/forms.py +++ b/pgcommitfest/commitfest/forms.py @@ -1,13 +1,13 @@ from django import forms +from django.contrib.auth.models import User +from django.db.models import Q from django.forms import ValidationError from django.forms.widgets import HiddenInput -from django.db.models import Q -from django.contrib.auth.models import User from django.http import Http404 -from .models import Patch, MailThread, PatchOnCommitFest, TargetVersion -from .widgets import ThreadPickWidget from .ajax import _archivesAPI +from .models import MailThread, Patch, PatchOnCommitFest, TargetVersion +from .widgets import ThreadPickWidget class 
CommitFestFilterForm(forms.Form): @@ -21,36 +21,60 @@ class CommitFestFilterForm(forms.Form): def __init__(self, cf, *args, **kwargs): super(CommitFestFilterForm, self).__init__(*args, **kwargs) - self.fields['sortkey'].widget = forms.HiddenInput() + self.fields["sortkey"].widget = forms.HiddenInput() - c = [(-1, '* All')] + list(PatchOnCommitFest._STATUS_CHOICES) - self.fields['status'] = forms.ChoiceField(choices=c, required=False) + c = [(-1, "* All")] + list(PatchOnCommitFest._STATUS_CHOICES) + self.fields["status"] = forms.ChoiceField(choices=c, required=False) q = Q(patch_author__commitfests=cf) | Q(patch_reviewer__commitfests=cf) - userchoices = [(-1, '* All'), (-2, '* None'), (-3, '* Yourself')] + [(u.id, '%s %s (%s)' % (u.first_name, u.last_name, u.username)) for u in User.objects.filter(q).distinct().order_by('first_name', 'last_name')] - self.fields['targetversion'] = forms.ChoiceField(choices=[('-1', '* All'), ('-2', '* None')] + [(v.id, v.version) for v in TargetVersion.objects.all()], required=False, label="Target version") - self.fields['author'] = forms.ChoiceField(choices=userchoices, required=False) - self.fields['reviewer'] = forms.ChoiceField(choices=userchoices, required=False) - - for f in ('status', 'author', 'reviewer',): - self.fields[f].widget.attrs = {'class': 'input-medium'} + userchoices = [(-1, "* All"), (-2, "* None"), (-3, "* Yourself")] + [ + (u.id, "%s %s (%s)" % (u.first_name, u.last_name, u.username)) + for u in User.objects.filter(q) + .distinct() + .order_by("first_name", "last_name") + ] + self.fields["targetversion"] = forms.ChoiceField( + choices=[("-1", "* All"), ("-2", "* None")] + + [(v.id, v.version) for v in TargetVersion.objects.all()], + required=False, + label="Target version", + ) + self.fields["author"] = forms.ChoiceField(choices=userchoices, required=False) + self.fields["reviewer"] = forms.ChoiceField(choices=userchoices, required=False) + + for f in ( + "status", + "author", + "reviewer", + ): + 
self.fields[f].widget.attrs = {"class": "input-medium"} class PatchForm(forms.ModelForm): selectize_multiple_fields = { - 'authors': '/lookups/user', - 'reviewers': '/lookups/user', + "authors": "/lookups/user", + "reviewers": "/lookups/user", } class Meta: model = Patch - exclude = ('commitfests', 'mailthreads', 'modified', 'lastmail', 'subscribers', ) + exclude = ( + "commitfests", + "mailthread_set", + "modified", + "lastmail", + "subscribers", + ) def __init__(self, *args, **kwargs): super(PatchForm, self).__init__(*args, **kwargs) - self.fields['authors'].help_text = 'Enter part of name to see list' - self.fields['reviewers'].help_text = 'Enter part of name to see list' - self.fields['committer'].label_from_instance = lambda x: '%s %s (%s)' % (x.user.first_name, x.user.last_name, x.user.username) + self.fields["authors"].help_text = "Enter part of name to see list" + self.fields["reviewers"].help_text = "Enter part of name to see list" + self.fields["committer"].label_from_instance = lambda x: "%s %s (%s)" % ( + x.user.first_name, + x.user.last_name, + x.user.username, + ) # Selectize multiple fields -- don't pre-populate everything for field, url in list(self.selectize_multiple_fields.items()): @@ -64,89 +88,128 @@ def __init__(self, *args, **kwargs): vals = [o.pk for o in getattr(self.instance, field).all()] else: vals = [] - if 'data' in kwargs and str(field) in kwargs['data']: - vals.extend([x for x in kwargs['data'].getlist(field)]) - self.fields[field].widget.attrs['data-selecturl'] = url - self.fields[field].queryset = self.fields[field].queryset.filter(pk__in=set(vals)) - self.fields[field].label_from_instance = lambda u: '{} ({})'.format(u.username, u.get_full_name()) - + if "data" in kwargs and str(field) in kwargs["data"]: + vals.extend([x for x in kwargs["data"].getlist(field)]) + self.fields[field].widget.attrs["data-selecturl"] = url + self.fields[field].queryset = self.fields[field].queryset.filter( + pk__in=set(vals) + ) + 
self.fields[field].label_from_instance = lambda u: "{} ({})".format( + u.username, u.get_full_name() + ) + + +class NewPatchForm(PatchForm): + # Put threadmsgid first + field_order = ["threadmsgid"] + + threadmsgid = forms.CharField( + max_length=200, + required=True, + label="Specify thread msgid", + widget=ThreadPickWidget, + ) -class NewPatchForm(forms.ModelForm): - threadmsgid = forms.CharField(max_length=200, required=True, label='Specify thread msgid', widget=ThreadPickWidget) -# patchfile = forms.FileField(allow_empty_file=False, max_length=50000, label='or upload patch file', required=False, help_text='This may be supported sometime in the future, and would then autogenerate a mail to the hackers list. At such a time, the threadmsgid would no longer be required.') + def __init__(self, *args, **kwargs): + request = kwargs.pop("request", None) + super(NewPatchForm, self).__init__(*args, **kwargs) - class Meta: - model = Patch - fields = ('name', 'topic', ) + if request: + self.fields["authors"].queryset = User.objects.filter(pk=request.user.id) + self.fields["authors"].initial = [request.user.id] def clean_threadmsgid(self): try: - _archivesAPI('/message-id.json/%s' % self.cleaned_data['threadmsgid']) + _archivesAPI("/message-id.json/%s" % self.cleaned_data["threadmsgid"]) except Http404: raise ValidationError("Message not found in archives") except Exception: raise ValidationError("Error in API call to validate thread") - return self.cleaned_data['threadmsgid'] + return self.cleaned_data["threadmsgid"] def _fetch_thread_choices(patch): - for mt in patch.mailthread_set.order_by('-latestmessage'): - ti = sorted(_archivesAPI('/message-id.json/%s' % mt.messageid), key=lambda x: x['date'], reverse=True) - yield [mt.subject, - [('%s,%s' % (mt.messageid, t['msgid']), 'From %s at %s' % (t['from'], t['date'])) for t in ti]] + for mt in patch.mailthread_set.order_by("-latestmessage"): + ti = sorted( + _archivesAPI("/message-id.json/%s" % mt.messageid), + key=lambda x: 
x["date"], + reverse=True, + ) + yield [ + mt.subject, + [ + ( + "%s,%s" % (mt.messageid, t["msgid"]), + "From %s at %s" % (t["from"], t["date"]), + ) + for t in ti + ], + ] review_state_choices = ( - (0, 'Tested'), - (1, 'Passed'), + (0, "Tested"), + (1, "Passed"), ) def reviewfield(label): - return forms.MultipleChoiceField(choices=review_state_choices, label=label, widget=forms.CheckboxSelectMultiple, required=False) + return forms.MultipleChoiceField( + choices=review_state_choices, + label=label, + widget=forms.CheckboxSelectMultiple, + required=False, + ) class CommentForm(forms.Form): - responseto = forms.ChoiceField(choices=[], required=True, label='In response to') + responseto = forms.ChoiceField(choices=[], required=True, label="In response to") # Specific checkbox fields for reviews - review_installcheck = reviewfield('make installcheck-world') - review_implements = reviewfield('Implements feature') - review_spec = reviewfield('Spec compliant') - review_doc = reviewfield('Documentation') + review_installcheck = reviewfield("make installcheck-world") + review_implements = reviewfield("Implements feature") + review_spec = reviewfield("Spec compliant") + review_doc = reviewfield("Documentation") message = forms.CharField(required=True, widget=forms.Textarea) - newstatus = forms.ChoiceField(choices=PatchOnCommitFest.OPEN_STATUS_CHOICES(), label='New status') + newstatus = forms.ChoiceField( + choices=PatchOnCommitFest.OPEN_STATUS_CHOICES(), label="New status" + ) def __init__(self, patch, poc, is_review, *args, **kwargs): super(CommentForm, self).__init__(*args, **kwargs) self.is_review = is_review - self.fields['responseto'].choices = _fetch_thread_choices(patch) - self.fields['newstatus'].initial = poc.status + self.fields["responseto"].choices = _fetch_thread_choices(patch) + self.fields["newstatus"].initial = poc.status if not is_review: - del self.fields['review_installcheck'] - del self.fields['review_implements'] - del self.fields['review_spec'] - 
del self.fields['review_doc'] + del self.fields["review_installcheck"] + del self.fields["review_implements"] + del self.fields["review_spec"] + del self.fields["review_doc"] def clean_responseto(self): try: - (threadid, respid) = self.cleaned_data['responseto'].split(',') + (threadid, respid) = self.cleaned_data["responseto"].split(",") self.thread = MailThread.objects.get(messageid=threadid) self.respid = respid except MailThread.DoesNotExist: - raise ValidationError('Selected thread appears to no longer exist') + raise ValidationError("Selected thread appears to no longer exist") except Exception: - raise ValidationError('Invalid message selected') - return self.cleaned_data['responseto'] + raise ValidationError("Invalid message selected") + return self.cleaned_data["responseto"] def clean(self): if self.is_review: for fn, f in self.fields.items(): - if fn.startswith('review_') and fn in self.cleaned_data: - if '1' in self.cleaned_data[fn] and '0' not in self.cleaned_data[fn]: - self.errors[fn] = (('Cannot pass a test without performing it!'),) + if fn.startswith("review_") and fn in self.cleaned_data: + if ( + "1" in self.cleaned_data[fn] + and "0" not in self.cleaned_data[fn] + ): + self.errors[fn] = ( + ("Cannot pass a test without performing it!"), + ) return self.cleaned_data @@ -155,7 +218,7 @@ class BulkEmailForm(forms.Form): authors = forms.CharField(required=False, widget=HiddenInput()) subject = forms.CharField(required=True) body = forms.CharField(required=True, widget=forms.Textarea) - confirm = forms.BooleanField(required=True, label='Check to confirm sending') + confirm = forms.BooleanField(required=True, label="Check to confirm sending") def __init__(self, *args, **kwargs): super(BulkEmailForm, self).__init__(*args, **kwargs) diff --git a/pgcommitfest/commitfest/lookups.py b/pgcommitfest/commitfest/lookups.py index 229459c6..76700912 100644 --- a/pgcommitfest/commitfest/lookups.py +++ b/pgcommitfest/commitfest/lookups.py @@ -1,22 +1,35 @@ -from 
django.http import HttpResponse, Http404 -from django.db.models import Q from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User +from django.db.models import Q +from django.http import Http404, HttpResponse import json @login_required def userlookup(request): - query = request.GET.get('query', None) + query = request.GET.get("query", None) if not query: raise Http404() users = User.objects.filter( Q(is_active=True), - Q(username__icontains=query) | Q(first_name__icontains=query) | Q(last_name__icontains=query), + Q(username__icontains=query) + | Q(first_name__icontains=query) + | Q(last_name__icontains=query), ) - return HttpResponse(json.dumps({ - 'values': [{'id': u.id, 'value': '{} ({})'.format(u.username, u.get_full_name())} for u in users], - }), content_type='application/json') + return HttpResponse( + json.dumps( + { + "values": [ + { + "id": u.id, + "value": "{} ({})".format(u.username, u.get_full_name()), + } + for u in users + ], + } + ), + content_type="application/json", + ) diff --git a/pgcommitfest/commitfest/management/commands/send_notifications.py b/pgcommitfest/commitfest/management/commands/send_notifications.py index cb2ef143..728c7f99 100644 --- a/pgcommitfest/commitfest/management/commands/send_notifications.py +++ b/pgcommitfest/commitfest/management/commands/send_notifications.py @@ -1,12 +1,10 @@ +from django.conf import settings from django.core.management.base import BaseCommand from django.db import transaction -from django.conf import settings - -from io import StringIO from pgcommitfest.commitfest.models import PendingNotification -from pgcommitfest.userprofile.models import UserProfile from pgcommitfest.mailqueue.util import send_template_mail +from pgcommitfest.userprofile.models import UserProfile class Command(BaseCommand): @@ -17,17 +15,24 @@ def handle(self, *args, **options): # Django doesn't do proper group by in the ORM, so we have to # build our own. 
matches = {} - for n in PendingNotification.objects.all().order_by('user', 'history__patch__id', 'history__id'): + for n in PendingNotification.objects.all().order_by( + "user", "history__patch__id", "history__id" + ): if n.user.id not in matches: - matches[n.user.id] = {'user': n.user, 'patches': {}} - if n.history.patch.id not in matches[n.user.id]['patches']: - matches[n.user.id]['patches'][n.history.patch.id] = {'patch': n.history.patch, 'entries': []} - matches[n.user.id]['patches'][n.history.patch.id]['entries'].append(n.history) + matches[n.user.id] = {"user": n.user, "patches": {}} + if n.history.patch.id not in matches[n.user.id]["patches"]: + matches[n.user.id]["patches"][n.history.patch.id] = { + "patch": n.history.patch, + "entries": [], + } + matches[n.user.id]["patches"][n.history.patch.id]["entries"].append( + n.history + ) n.delete() # Ok, now let's build emails from this for v in matches.values(): - user = v['user'] + user = v["user"] email = user.email try: if user.userprofile and user.userprofile.notifyemail: @@ -35,13 +40,14 @@ def handle(self, *args, **options): except UserProfile.DoesNotExist: pass - send_template_mail(settings.NOTIFICATION_FROM, - None, - email, - "PostgreSQL commitfest updates", - 'mail/patch_notify.txt', - { - 'user': user, - 'patches': v['patches'], - }, - ) + send_template_mail( + settings.NOTIFICATION_FROM, + None, + email, + "PostgreSQL commitfest updates", + "mail/patch_notify.txt", + { + "user": user, + "patches": v["patches"], + }, + ) diff --git a/pgcommitfest/commitfest/migrations/0001_initial.py b/pgcommitfest/commitfest/migrations/0001_initial.py index a58a5e18..aa688d7e 100644 --- a/pgcommitfest/commitfest/migrations/0001_initial.py +++ b/pgcommitfest/commitfest/migrations/0001_initial.py @@ -1,183 +1,327 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations, models from django.conf import settings +from django.db import migrations, models + import 
pgcommitfest.commitfest.util class Migration(migrations.Migration): - dependencies = [ - ('auth', '0006_require_contenttypes_0002'), + ("auth", "0006_require_contenttypes_0002"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='CommitFest', + name="CommitFest", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('name', models.CharField(unique=True, max_length=100)), - ('status', models.IntegerField(default=1, choices=[(1, 'Future'), (2, 'Open'), (3, 'In Progress'), (4, 'Closed')])), - ('startdate', models.DateField(null=True, blank=True)), - ('enddate', models.DateField(null=True, blank=True)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("name", models.CharField(unique=True, max_length=100)), + ( + "status", + models.IntegerField( + default=1, + choices=[ + (1, "Future"), + (2, "Open"), + (3, "In Progress"), + (4, "Closed"), + ], + ), + ), + ("startdate", models.DateField(null=True, blank=True)), + ("enddate", models.DateField(null=True, blank=True)), ], options={ - 'ordering': ('-startdate',), - 'verbose_name_plural': 'Commitfests', + "ordering": ("-startdate",), + "verbose_name_plural": "Commitfests", }, ), migrations.CreateModel( - name='Committer', + name="Committer", fields=[ - ('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), - ('active', models.BooleanField(default=True)), + ( + "user", + models.OneToOneField( + primary_key=True, + serialize=False, + to=settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + ), + ), + ("active", models.BooleanField(default=True)), ], options={ - 'ordering': ('user__last_name', 'user__first_name'), + "ordering": ("user__last_name", "user__first_name"), }, ), migrations.CreateModel( - name='MailThread', + name="MailThread", fields=[ - ('id', 
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('messageid', models.CharField(unique=True, max_length=1000)), - ('subject', models.CharField(max_length=500)), - ('firstmessage', models.DateTimeField()), - ('firstauthor', models.CharField(max_length=500)), - ('latestmessage', models.DateTimeField()), - ('latestauthor', models.CharField(max_length=500)), - ('latestsubject', models.CharField(max_length=500)), - ('latestmsgid', models.CharField(max_length=1000)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("messageid", models.CharField(unique=True, max_length=1000)), + ("subject", models.CharField(max_length=500)), + ("firstmessage", models.DateTimeField()), + ("firstauthor", models.CharField(max_length=500)), + ("latestmessage", models.DateTimeField()), + ("latestauthor", models.CharField(max_length=500)), + ("latestsubject", models.CharField(max_length=500)), + ("latestmsgid", models.CharField(max_length=1000)), ], options={ - 'ordering': ('firstmessage',), + "ordering": ("firstmessage",), }, ), migrations.CreateModel( - name='MailThreadAnnotation', + name="MailThreadAnnotation", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('date', models.DateTimeField(auto_now_add=True)), - ('msgid', models.CharField(max_length=1000)), - ('annotationtext', models.TextField(max_length=2000)), - ('mailsubject', models.CharField(max_length=500)), - ('maildate', models.DateTimeField()), - ('mailauthor', models.CharField(max_length=500)), - ('mailthread', models.ForeignKey(to='commitfest.MailThread', on_delete=models.CASCADE)), - ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("date", models.DateTimeField(auto_now_add=True)), + 
("msgid", models.CharField(max_length=1000)), + ("annotationtext", models.TextField(max_length=2000)), + ("mailsubject", models.CharField(max_length=500)), + ("maildate", models.DateTimeField()), + ("mailauthor", models.CharField(max_length=500)), + ( + "mailthread", + models.ForeignKey( + to="commitfest.MailThread", on_delete=models.CASCADE + ), + ), + ( + "user", + models.ForeignKey( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), + ), ], options={ - 'ordering': ('date',), + "ordering": ("date",), }, ), migrations.CreateModel( - name='MailThreadAttachment', + name="MailThreadAttachment", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('messageid', models.CharField(max_length=1000)), - ('attachmentid', models.IntegerField()), - ('filename', models.CharField(max_length=1000, blank=True)), - ('date', models.DateTimeField()), - ('author', models.CharField(max_length=500)), - ('ispatch', models.BooleanField(null=True)), - ('mailthread', models.ForeignKey(to='commitfest.MailThread', on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("messageid", models.CharField(max_length=1000)), + ("attachmentid", models.IntegerField()), + ("filename", models.CharField(max_length=1000, blank=True)), + ("date", models.DateTimeField()), + ("author", models.CharField(max_length=500)), + ("ispatch", models.BooleanField(null=True)), + ( + "mailthread", + models.ForeignKey( + to="commitfest.MailThread", on_delete=models.CASCADE + ), + ), ], options={ - 'ordering': ('-date',), + "ordering": ("-date",), }, ), migrations.CreateModel( - name='Patch', + name="Patch", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('name', models.CharField(max_length=500, verbose_name='Description')), - ('wikilink', models.URLField(default='', null=False, blank=True)), - 
('gitlink', models.URLField(default='', null=False, blank=True)), - ('created', models.DateTimeField(auto_now_add=True)), - ('modified', models.DateTimeField()), - ('lastmail', models.DateTimeField(null=True, blank=True)), - ('authors', models.ManyToManyField(related_name='patch_author', to=settings.AUTH_USER_MODEL, blank=True)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("name", models.CharField(max_length=500, verbose_name="Description")), + ("wikilink", models.URLField(default="", null=False, blank=True)), + ("gitlink", models.URLField(default="", null=False, blank=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("modified", models.DateTimeField()), + ("lastmail", models.DateTimeField(null=True, blank=True)), + ( + "authors", + models.ManyToManyField( + related_name="patch_author", + to=settings.AUTH_USER_MODEL, + blank=True, + ), + ), ], options={ - 'verbose_name_plural': 'patches', + "verbose_name_plural": "patches", }, bases=(models.Model, pgcommitfest.commitfest.util.DiffableModel), ), migrations.CreateModel( - name='PatchHistory', + name="PatchHistory", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('date', models.DateTimeField(auto_now_add=True)), - ('what', models.CharField(max_length=500)), - ('by', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), - ('patch', models.ForeignKey(to='commitfest.Patch', on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("date", models.DateTimeField(auto_now_add=True)), + ("what", models.CharField(max_length=500)), + ( + "by", + models.ForeignKey( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), + ), + ( + "patch", + models.ForeignKey(to="commitfest.Patch", on_delete=models.CASCADE), + ), ], options={ - 'ordering': 
('-date',), + "ordering": ("-date",), }, ), migrations.CreateModel( - name='PatchOnCommitFest', + name="PatchOnCommitFest", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('enterdate', models.DateTimeField()), - ('leavedate', models.DateTimeField(null=True, blank=True)), - ('status', models.IntegerField(default=1, choices=[(1, 'Needs review'), (2, 'Waiting on Author'), (3, 'Ready for Committer'), (4, 'Committed'), (5, 'Moved to next CF'), (6, 'Rejected'), (7, 'Returned with feedback')])), - ('commitfest', models.ForeignKey(to='commitfest.CommitFest', on_delete=models.CASCADE)), - ('patch', models.ForeignKey(to='commitfest.Patch', on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("enterdate", models.DateTimeField()), + ("leavedate", models.DateTimeField(null=True, blank=True)), + ( + "status", + models.IntegerField( + default=1, + choices=[ + (1, "Needs review"), + (2, "Waiting on Author"), + (3, "Ready for Committer"), + (4, "Committed"), + (5, "Moved to next CF"), + (6, "Rejected"), + (7, "Returned with feedback"), + ], + ), + ), + ( + "commitfest", + models.ForeignKey( + to="commitfest.CommitFest", on_delete=models.CASCADE + ), + ), + ( + "patch", + models.ForeignKey(to="commitfest.Patch", on_delete=models.CASCADE), + ), ], options={ - 'ordering': ('-commitfest__startdate',), + "ordering": ("-commitfest__startdate",), }, ), migrations.CreateModel( - name='PatchStatus', + name="PatchStatus", fields=[ - ('status', models.IntegerField(serialize=False, primary_key=True)), - ('statusstring', models.TextField(max_length=50)), - ('sortkey', models.IntegerField(default=10)), + ("status", models.IntegerField(serialize=False, primary_key=True)), + ("statusstring", models.TextField(max_length=50)), + ("sortkey", models.IntegerField(default=10)), ], ), migrations.CreateModel( - name='Topic', + name="Topic", 
fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('topic', models.CharField(max_length=100)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("topic", models.CharField(max_length=100)), ], ), migrations.AddField( - model_name='patch', - name='commitfests', - field=models.ManyToManyField(to='commitfest.CommitFest', through='commitfest.PatchOnCommitFest'), + model_name="patch", + name="commitfests", + field=models.ManyToManyField( + to="commitfest.CommitFest", through="commitfest.PatchOnCommitFest" + ), ), migrations.AddField( - model_name='patch', - name='committer', - field=models.ForeignKey(blank=True, to='commitfest.Committer', null=True, on_delete=models.CASCADE), + model_name="patch", + name="committer", + field=models.ForeignKey( + blank=True, + to="commitfest.Committer", + null=True, + on_delete=models.CASCADE, + ), ), migrations.AddField( - model_name='patch', - name='reviewers', - field=models.ManyToManyField(related_name='patch_reviewer', to=settings.AUTH_USER_MODEL, blank=True), + model_name="patch", + name="reviewers", + field=models.ManyToManyField( + related_name="patch_reviewer", to=settings.AUTH_USER_MODEL, blank=True + ), ), migrations.AddField( - model_name='patch', - name='topic', - field=models.ForeignKey(to='commitfest.Topic', on_delete=models.CASCADE), + model_name="patch", + name="topic", + field=models.ForeignKey(to="commitfest.Topic", on_delete=models.CASCADE), ), migrations.AddField( - model_name='mailthread', - name='patches', - field=models.ManyToManyField(to='commitfest.Patch'), + model_name="mailthread", + name="patches", + field=models.ManyToManyField(to="commitfest.Patch"), ), migrations.AlterUniqueTogether( - name='patchoncommitfest', - unique_together=set([('patch', 'commitfest')]), + name="patchoncommitfest", + unique_together=set([("patch", "commitfest")]), ), migrations.AlterUniqueTogether( 
- name='mailthreadattachment', - unique_together=set([('mailthread', 'messageid')]), + name="mailthreadattachment", + unique_together=set([("mailthread", "messageid")]), ), ] diff --git a/pgcommitfest/commitfest/migrations/0002_notifications.py b/pgcommitfest/commitfest/migrations/0002_notifications.py index 7fc2396e..450ddfbe 100644 --- a/pgcommitfest/commitfest/migrations/0002_notifications.py +++ b/pgcommitfest/commitfest/migrations/0002_notifications.py @@ -1,29 +1,48 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations, models from django.conf import settings +from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('commitfest', '0001_initial'), + ("commitfest", "0001_initial"), ] operations = [ migrations.CreateModel( - name='PendingNotification', + name="PendingNotification", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('history', models.ForeignKey(to='commitfest.PatchHistory', on_delete=models.CASCADE)), - ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ( + "history", + models.ForeignKey( + to="commitfest.PatchHistory", on_delete=models.CASCADE + ), + ), + ( + "user", + models.ForeignKey( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), + ), ], ), migrations.AddField( - model_name='patch', - name='subscribers', - field=models.ManyToManyField(related_name='patch_subscriber', to=settings.AUTH_USER_MODEL, blank=True), + model_name="patch", + name="subscribers", + field=models.ManyToManyField( + related_name="patch_subscriber", to=settings.AUTH_USER_MODEL, blank=True + ), ), ] diff --git a/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py 
b/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py index e6cdea95..2f6a5f7d 100644 --- a/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py +++ b/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py @@ -5,16 +5,27 @@ class Migration(migrations.Migration): - dependencies = [ - ('commitfest', '0002_notifications'), + ("commitfest", "0002_notifications"), ] operations = [ migrations.AlterField( - model_name='patchoncommitfest', - name='status', - field=models.IntegerField(default=1, choices=[(1, 'Needs review'), (2, 'Waiting on Author'), (3, 'Ready for Committer'), (4, 'Committed'), (5, 'Moved to next CF'), (6, 'Rejected'), (7, 'Returned with feedback'), (8, 'Withdrawn')]), + model_name="patchoncommitfest", + name="status", + field=models.IntegerField( + default=1, + choices=[ + (1, "Needs review"), + (2, "Waiting on Author"), + (3, "Ready for Committer"), + (4, "Committed"), + (5, "Moved to next CF"), + (6, "Rejected"), + (7, "Returned with feedback"), + (8, "Withdrawn"), + ], + ), ), migrations.RunSQL(""" INSERT INTO commitfest_patchstatus (status, statusstring, sortkey) VALUES @@ -28,5 +39,7 @@ class Migration(migrations.Migration): (8,'Withdrawn', 50) ON CONFLICT (status) DO UPDATE SET statusstring=excluded.statusstring, sortkey=excluded.sortkey; """), - migrations.RunSQL("DELETE FROM commitfest_patchstatus WHERE status < 1 OR status > 8"), + migrations.RunSQL( + "DELETE FROM commitfest_patchstatus WHERE status < 1 OR status > 8" + ), ] diff --git a/pgcommitfest/commitfest/migrations/0004_target_version.py b/pgcommitfest/commitfest/migrations/0004_target_version.py index b307883d..ad546109 100644 --- a/pgcommitfest/commitfest/migrations/0004_target_version.py +++ b/pgcommitfest/commitfest/migrations/0004_target_version.py @@ -2,30 +2,45 @@ # Generated by Django 1.11.17 on 2019-02-06 19:43 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, 
models class Migration(migrations.Migration): - dependencies = [ - ('commitfest', '0003_withdrawn_status'), + ("commitfest", "0003_withdrawn_status"), ] operations = [ migrations.CreateModel( - name='TargetVersion', + name="TargetVersion", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('version', models.CharField(max_length=8, unique=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("version", models.CharField(max_length=8, unique=True)), ], options={ - 'ordering': ['-version', ], + "ordering": [ + "-version", + ], }, ), migrations.AddField( - model_name='patch', - name='targetversion', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='commitfest.TargetVersion', verbose_name='Target version'), + model_name="patch", + name="targetversion", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="commitfest.TargetVersion", + verbose_name="Target version", + ), ), ] diff --git a/pgcommitfest/commitfest/migrations/0005_history_dateindex.py b/pgcommitfest/commitfest/migrations/0005_history_dateindex.py index c7be8fcc..4316f212 100644 --- a/pgcommitfest/commitfest/migrations/0005_history_dateindex.py +++ b/pgcommitfest/commitfest/migrations/0005_history_dateindex.py @@ -6,15 +6,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('commitfest', '0004_target_version'), + ("commitfest", "0004_target_version"), ] operations = [ migrations.AlterField( - model_name='patchhistory', - name='date', + model_name="patchhistory", + name="date", field=models.DateTimeField(auto_now_add=True, db_index=True), ), ] diff --git a/pgcommitfest/commitfest/migrations/0006_cfbot_integration.py b/pgcommitfest/commitfest/migrations/0006_cfbot_integration.py new file mode 100644 index 00000000..0a1ee6b8 --- /dev/null +++ 
b/pgcommitfest/commitfest/migrations/0006_cfbot_integration.py @@ -0,0 +1,120 @@ +# Generated by Django 4.2.17 on 2024-12-21 14:15 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("commitfest", "0005_history_dateindex"), + ] + + operations = [ + migrations.CreateModel( + name="CfbotBranch", + fields=[ + ( + "patch", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + primary_key=True, + related_name="cfbot_branch", + serialize=False, + to="commitfest.patch", + ), + ), + ("branch_id", models.IntegerField()), + ("branch_name", models.TextField()), + ("commit_id", models.TextField(blank=True, null=True)), + ("apply_url", models.TextField()), + ( + "status", + models.TextField( + choices=[ + ("testing", "Testing"), + ("finished", "Finished"), + ("failed", "Failed"), + ("timeout", "Timeout"), + ] + ), + ), + ("created", models.DateTimeField(auto_now_add=True)), + ("modified", models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name="CfbotTask", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("task_id", models.TextField(unique=True)), + ("task_name", models.TextField()), + ("branch_id", models.IntegerField()), + ("position", models.IntegerField()), + ( + "status", + models.TextField( + choices=[ + ("CREATED", "Created"), + ("NEEDS_APPROVAL", "Needs Approval"), + ("TRIGGERED", "Triggered"), + ("EXECUTING", "Executing"), + ("FAILED", "Failed"), + ("COMPLETED", "Completed"), + ("SCHEDULED", "Scheduled"), + ("ABORTED", "Aborted"), + ("ERRORED", "Errored"), + ] + ), + ), + ("created", models.DateTimeField(auto_now_add=True)), + ("modified", models.DateTimeField(auto_now=True)), + ( + "patch", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="cfbot_tasks", + to="commitfest.patch", + ), + ), + ], + ), + migrations.RunSQL( + """ + CREATE TYPE cfbotbranch_status AS ENUM ( + 
'testing', + 'finished', + 'failed', + 'timeout' + ); + """ + ), + migrations.RunSQL( + """ + CREATE TYPE cfbottask_status AS ENUM ( + 'CREATED', + 'NEEDS_APPROVAL', + 'TRIGGERED', + 'EXECUTING', + 'FAILED', + 'COMPLETED', + 'SCHEDULED', + 'ABORTED', + 'ERRORED' + ); + """ + ), + migrations.RunSQL( + """ + ALTER TABLE commitfest_cfbotbranch + ALTER COLUMN status TYPE cfbotbranch_status + USING status::cfbotbranch_status; + """ + ), + migrations.RunSQL( + """ + ALTER TABLE commitfest_cfbottask + ALTER COLUMN status TYPE cfbottask_status + USING status::cfbottask_status; + """ + ), + ] diff --git a/pgcommitfest/commitfest/migrations/0007_needs_rebase_emails.py b/pgcommitfest/commitfest/migrations/0007_needs_rebase_emails.py new file mode 100644 index 00000000..cd3d291d --- /dev/null +++ b/pgcommitfest/commitfest/migrations/0007_needs_rebase_emails.py @@ -0,0 +1,46 @@ +# Generated by Django 4.2.17 on 2024-12-25 11:17 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("commitfest", "0006_cfbot_integration"), + ] + + operations = [ + migrations.AddField( + model_name="cfbotbranch", + name="needs_rebase_since", + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name="patchhistory", + name="by_cfbot", + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name="patchhistory", + name="by", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AddConstraint( + model_name="patchhistory", + constraint=models.CheckConstraint( + check=models.Q( + models.Q(("by_cfbot", True), ("by__isnull", True)), + models.Q(("by_cfbot", False), ("by__isnull", False)), + _connector="OR", + ), + name="check_by", + ), + ), + ] diff --git 
a/pgcommitfest/commitfest/migrations/0008_move_mail_thread_many_to_many.py b/pgcommitfest/commitfest/migrations/0008_move_mail_thread_many_to_many.py new file mode 100644 index 00000000..de8af8c7 --- /dev/null +++ b/pgcommitfest/commitfest/migrations/0008_move_mail_thread_many_to_many.py @@ -0,0 +1,31 @@ +# Generated by Django 4.2.17 on 2025-01-25 11:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("commitfest", "0007_needs_rebase_emails"), + ] + + operations = [ + migrations.RunSQL( + migrations.RunSQL.noop, + reverse_sql=migrations.RunSQL.noop, + state_operations=[ + migrations.RemoveField( + model_name="mailthread", + name="patches", + ), + migrations.AddField( + model_name="patch", + name="mailthread_set", + field=models.ManyToManyField( + db_table="commitfest_mailthread_patches", + related_name="patches", + to="commitfest.mailthread", + ), + ), + ], + ) + ] diff --git a/pgcommitfest/commitfest/migrations/0009_extra_branch_fields.py b/pgcommitfest/commitfest/migrations/0009_extra_branch_fields.py new file mode 100644 index 00000000..7e53dd3a --- /dev/null +++ b/pgcommitfest/commitfest/migrations/0009_extra_branch_fields.py @@ -0,0 +1,42 @@ +# Generated by Django 4.2.17 on 2025-01-31 11:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("commitfest", "0008_move_mail_thread_many_to_many"), + ] + + operations = [ + migrations.AddField( + model_name="cfbotbranch", + name="all_additions", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="cfbotbranch", + name="all_deletions", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="cfbotbranch", + name="first_additions", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="cfbotbranch", + name="first_deletions", + field=models.IntegerField(blank=True, null=True), + ), + 
migrations.AddField( + model_name="cfbotbranch", + name="patch_count", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="cfbotbranch", + name="version", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/pgcommitfest/commitfest/models.py b/pgcommitfest/commitfest/models.py index 28722f06..05956b83 100644 --- a/pgcommitfest/commitfest/models.py +++ b/pgcommitfest/commitfest/models.py @@ -1,18 +1,20 @@ -from django.db import models from django.contrib.auth.models import User +from django.db import models from datetime import datetime -from .util import DiffableModel - from pgcommitfest.userprofile.models import UserProfile +from .util import DiffableModel + # We have few enough of these, and it's really the only thing we # need to extend from the user model, so just create a separate # class. class Committer(models.Model): - user = models.OneToOneField(User, null=False, blank=False, primary_key=True, on_delete=models.CASCADE) + user = models.OneToOneField( + User, null=False, blank=False, primary_key=True, on_delete=models.CASCADE + ) active = models.BooleanField(null=False, blank=False, default=True) def __str__(self): @@ -20,10 +22,14 @@ def __str__(self): @property def fullname(self): - return "%s %s (%s)" % (self.user.first_name, self.user.last_name, self.user.username) + return "%s %s (%s)" % ( + self.user.first_name, + self.user.last_name, + self.user.username, + ) class Meta: - ordering = ('user__last_name', 'user__first_name') + ordering = ("user__last_name", "user__first_name") class CommitFest(models.Model): @@ -32,13 +38,15 @@ class CommitFest(models.Model): STATUS_INPROGRESS = 3 STATUS_CLOSED = 4 _STATUS_CHOICES = ( - (STATUS_FUTURE, 'Future'), - (STATUS_OPEN, 'Open'), - (STATUS_INPROGRESS, 'In Progress'), - (STATUS_CLOSED, 'Closed'), + (STATUS_FUTURE, "Future"), + (STATUS_OPEN, "Open"), + (STATUS_INPROGRESS, "In Progress"), + (STATUS_CLOSED, "Closed"), ) name = 
models.CharField(max_length=100, blank=False, null=False, unique=True) - status = models.IntegerField(null=False, blank=False, default=1, choices=_STATUS_CHOICES) + status = models.IntegerField( + null=False, blank=False, default=1, choices=_STATUS_CHOICES + ) startdate = models.DateField(blank=True, null=True) enddate = models.DateField(blank=True, null=True) @@ -64,8 +72,8 @@ def __str__(self): return self.name class Meta: - verbose_name_plural = 'Commitfests' - ordering = ('-startdate',) + verbose_name_plural = "Commitfests" + ordering = ("-startdate",) class Topic(models.Model): @@ -79,35 +87,56 @@ class TargetVersion(models.Model): version = models.CharField(max_length=8, blank=False, null=False, unique=True) class Meta: - ordering = ['-version', ] + ordering = [ + "-version", + ] def __str__(self): return self.version class Patch(models.Model, DiffableModel): - name = models.CharField(max_length=500, blank=False, null=False, verbose_name='Description') + name = models.CharField( + max_length=500, blank=False, null=False, verbose_name="Description" + ) topic = models.ForeignKey(Topic, blank=False, null=False, on_delete=models.CASCADE) # One patch can be in multiple commitfests, if it has history - commitfests = models.ManyToManyField(CommitFest, through='PatchOnCommitFest') + commitfests = models.ManyToManyField(CommitFest, through="PatchOnCommitFest") # If there is a wiki page discussing this patch - wikilink = models.URLField(blank=True, null=False, default='') + wikilink = models.URLField(blank=True, null=False, default="") # If there is a git repo about this patch - gitlink = models.URLField(blank=True, null=False, default='') + gitlink = models.URLField(blank=True, null=False, default="") # Version targeted by this patch - targetversion = models.ForeignKey(TargetVersion, blank=True, null=True, verbose_name="Target version", on_delete=models.CASCADE) + targetversion = models.ForeignKey( + TargetVersion, + blank=True, + null=True, + verbose_name="Target 
version", + on_delete=models.CASCADE, + ) - authors = models.ManyToManyField(User, related_name='patch_author', blank=True) - reviewers = models.ManyToManyField(User, related_name='patch_reviewer', blank=True) + authors = models.ManyToManyField(User, related_name="patch_author", blank=True) + reviewers = models.ManyToManyField(User, related_name="patch_reviewer", blank=True) - committer = models.ForeignKey(Committer, blank=True, null=True, on_delete=models.CASCADE) + committer = models.ForeignKey( + Committer, blank=True, null=True, on_delete=models.CASCADE + ) # Users to be notified when something happens - subscribers = models.ManyToManyField(User, related_name='patch_subscriber', blank=True) + subscribers = models.ManyToManyField( + User, related_name="patch_subscriber", blank=True + ) + + mailthread_set = models.ManyToManyField( + "MailThread", + related_name="patches", + blank=False, + db_table="commitfest_mailthread_patches", + ) # Datestamps for tracking activity created = models.DateTimeField(blank=False, null=False, auto_now_add=True) @@ -118,24 +147,37 @@ class Patch(models.Model, DiffableModel): lastmail = models.DateTimeField(blank=True, null=True) map_manytomany_for_diff = { - 'authors': 'authors_string', - 'reviewers': 'reviewers_string', + "authors": "authors_string", + "reviewers": "reviewers_string", } + def current_commitfest(self): + return self.commitfests.order_by("-startdate").first() + # Some accessors @property def authors_string(self): - return ", ".join(["%s %s (%s)" % (a.first_name, a.last_name, a.username) for a in self.authors.all()]) + return ", ".join( + [ + "%s %s (%s)" % (a.first_name, a.last_name, a.username) + for a in self.authors.all() + ] + ) @property def reviewers_string(self): - return ", ".join(["%s %s (%s)" % (a.first_name, a.last_name, a.username) for a in self.reviewers.all()]) + return ", ".join( + [ + "%s %s (%s)" % (a.first_name, a.last_name, a.username) + for a in self.reviewers.all() + ] + ) @property def 
history(self): # Need to wrap this in a function to make sure it calls # select_related() and doesn't generate a bazillion queries - return self.patchhistory_set.select_related('by').all() + return self.patchhistory_set.select_related("by").all() def set_modified(self, newmod=None): # Set the modified date to newmod, but only if that's newer than @@ -159,7 +201,7 @@ def __str__(self): return self.name class Meta: - verbose_name_plural = 'patches' + verbose_name_plural = "patches" class PatchOnCommitFest(models.Model): @@ -176,24 +218,24 @@ class PatchOnCommitFest(models.Model): STATUS_RETURNED = 7 STATUS_WITHDRAWN = 8 _STATUS_CHOICES = ( - (STATUS_REVIEW, 'Needs review'), - (STATUS_AUTHOR, 'Waiting on Author'), - (STATUS_COMMITTER, 'Ready for Committer'), - (STATUS_COMMITTED, 'Committed'), - (STATUS_NEXT, 'Moved to next CF'), - (STATUS_REJECTED, 'Rejected'), - (STATUS_RETURNED, 'Returned with feedback'), - (STATUS_WITHDRAWN, 'Withdrawn'), + (STATUS_REVIEW, "Needs review"), + (STATUS_AUTHOR, "Waiting on Author"), + (STATUS_COMMITTER, "Ready for Committer"), + (STATUS_COMMITTED, "Committed"), + (STATUS_NEXT, "Moved to next CF"), + (STATUS_REJECTED, "Rejected"), + (STATUS_RETURNED, "Returned with feedback"), + (STATUS_WITHDRAWN, "Withdrawn"), ) _STATUS_LABELS = ( - (STATUS_REVIEW, 'default'), - (STATUS_AUTHOR, 'primary'), - (STATUS_COMMITTER, 'info'), - (STATUS_COMMITTED, 'success'), - (STATUS_NEXT, 'warning'), - (STATUS_REJECTED, 'danger'), - (STATUS_RETURNED, 'danger'), - (STATUS_WITHDRAWN, 'danger'), + (STATUS_REVIEW, "default"), + (STATUS_AUTHOR, "primary"), + (STATUS_COMMITTER, "info"), + (STATUS_COMMITTED, "success"), + (STATUS_NEXT, "warning"), + (STATUS_REJECTED, "danger"), + (STATUS_RETURNED, "danger"), + (STATUS_WITHDRAWN, "danger"), ) OPEN_STATUSES = [STATUS_REVIEW, STATUS_AUTHOR, STATUS_COMMITTER] @@ -202,11 +244,15 @@ def OPEN_STATUS_CHOICES(cls): return [x for x in cls._STATUS_CHOICES if x[0] in cls.OPEN_STATUSES] patch = models.ForeignKey(Patch, 
blank=False, null=False, on_delete=models.CASCADE) - commitfest = models.ForeignKey(CommitFest, blank=False, null=False, on_delete=models.CASCADE) + commitfest = models.ForeignKey( + CommitFest, blank=False, null=False, on_delete=models.CASCADE + ) enterdate = models.DateTimeField(blank=False, null=False) leavedate = models.DateTimeField(blank=True, null=True) - status = models.IntegerField(blank=False, null=False, default=STATUS_REVIEW, choices=_STATUS_CHOICES) + status = models.IntegerField( + blank=False, null=False, default=STATUS_REVIEW, choices=_STATUS_CHOICES + ) @property def is_closed(self): @@ -217,56 +263,95 @@ def statusstring(self): return [v for k, v in self._STATUS_CHOICES if k == self.status][0] class Meta: - unique_together = (('patch', 'commitfest',),) - ordering = ('-commitfest__startdate', ) + unique_together = ( + ( + "patch", + "commitfest", + ), + ) + ordering = ("-commitfest__startdate",) class PatchHistory(models.Model): patch = models.ForeignKey(Patch, blank=False, null=False, on_delete=models.CASCADE) - date = models.DateTimeField(blank=False, null=False, auto_now_add=True, db_index=True) - by = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE) + date = models.DateTimeField( + blank=False, null=False, auto_now_add=True, db_index=True + ) + by = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE) + by_cfbot = models.BooleanField(null=False, blank=False, default=False) what = models.CharField(max_length=500, null=False, blank=False) @property def by_string(self): + if self.by_cfbot: + return "CFbot" + return "%s %s (%s)" % (self.by.first_name, self.by.last_name, self.by.username) def __str__(self): return "%s - %s" % (self.patch.name, self.date) class Meta: - ordering = ('-date', ) - - def save_and_notify(self, prevcommitter=None, - prevreviewers=None, prevauthors=None): + ordering = ("-date",) + constraints = [ + models.CheckConstraint( + check=(models.Q(by_cfbot=True) & 
models.Q(by__isnull=True)) + | (models.Q(by_cfbot=False) & models.Q(by__isnull=False)), + name="check_by", + ), + ] + + def save_and_notify( + self, + prevcommitter=None, + prevreviewers=None, + prevauthors=None, + authors_only=False, + ): # Save this model, and then trigger notifications if there are any. There are # many different things that can trigger notifications, so try them all. self.save() recipients = [] - recipients.extend(self.patch.subscribers.all()) - - # Current or previous committer wants all notifications - try: - if self.patch.committer and self.patch.committer.user.userprofile.notify_all_committer: - recipients.append(self.patch.committer.user) - except UserProfile.DoesNotExist: - pass - - try: - if prevcommitter and prevcommitter.user.userprofile.notify_all_committer: - recipients.append(prevcommitter.user) - except UserProfile.DoesNotExist: - pass - - # Current or previous reviewers wants all notifications - recipients.extend(self.patch.reviewers.filter(userprofile__notify_all_reviewer=True)) - if prevreviewers: - # prevreviewers is a list - recipients.extend(User.objects.filter(id__in=[p.id for p in prevreviewers], userprofile__notify_all_reviewer=True)) + if not authors_only: + recipients.extend(self.patch.subscribers.all()) + + # Current or previous committer wants all notifications + try: + if ( + self.patch.committer + and self.patch.committer.user.userprofile.notify_all_committer + ): + recipients.append(self.patch.committer.user) + except UserProfile.DoesNotExist: + pass + + try: + if ( + prevcommitter + and prevcommitter.user.userprofile.notify_all_committer + ): + recipients.append(prevcommitter.user) + except UserProfile.DoesNotExist: + pass + + # Current or previous reviewers wants all notifications + recipients.extend( + self.patch.reviewers.filter(userprofile__notify_all_reviewer=True) + ) + if prevreviewers: + # prevreviewers is a list + recipients.extend( + User.objects.filter( + id__in=[p.id for p in prevreviewers], + 
userprofile__notify_all_reviewer=True, + ) + ) # Current or previous authors wants all notifications - recipients.extend(self.patch.authors.filter(userprofile__notify_all_author=True)) + recipients.extend( + self.patch.authors.filter(userprofile__notify_all_author=True) + ) for u in set(recipients): if u != self.by: # Don't notify for changes we make ourselves @@ -284,7 +369,6 @@ class MailThread(models.Model): # so we can keep track of when there was last a change on the # thread in question. messageid = models.CharField(max_length=1000, null=False, blank=False, unique=True) - patches = models.ManyToManyField(Patch, blank=False) subject = models.CharField(max_length=500, null=False, blank=False) firstmessage = models.DateTimeField(null=False, blank=False) firstauthor = models.CharField(max_length=500, null=False, blank=False) @@ -297,11 +381,13 @@ def __str__(self): return self.subject class Meta: - ordering = ('firstmessage', ) + ordering = ("firstmessage",) class MailThreadAttachment(models.Model): - mailthread = models.ForeignKey(MailThread, null=False, blank=False, on_delete=models.CASCADE) + mailthread = models.ForeignKey( + MailThread, null=False, blank=False, on_delete=models.CASCADE + ) messageid = models.CharField(max_length=1000, null=False, blank=False) attachmentid = models.IntegerField(null=False, blank=False) filename = models.CharField(max_length=1000, null=False, blank=True) @@ -310,12 +396,19 @@ class MailThreadAttachment(models.Model): ispatch = models.BooleanField(null=True) class Meta: - ordering = ('-date',) - unique_together = (('mailthread', 'messageid',), ) + ordering = ("-date",) + unique_together = ( + ( + "mailthread", + "messageid", + ), + ) class MailThreadAnnotation(models.Model): - mailthread = models.ForeignKey(MailThread, null=False, blank=False, on_delete=models.CASCADE) + mailthread = models.ForeignKey( + MailThread, null=False, blank=False, on_delete=models.CASCADE + ) date = models.DateTimeField(null=False, blank=False, 
auto_now_add=True) user = models.ForeignKey(User, null=False, blank=False, on_delete=models.CASCADE) msgid = models.CharField(max_length=1000, null=False, blank=False) @@ -326,10 +419,14 @@ class MailThreadAnnotation(models.Model): @property def user_string(self): - return "%s %s (%s)" % (self.user.first_name, self.user.last_name, self.user.username) + return "%s %s (%s)" % ( + self.user.first_name, + self.user.last_name, + self.user.username, + ) class Meta: - ordering = ('date', ) + ordering = ("date",) class PatchStatus(models.Model): @@ -339,5 +436,79 @@ class PatchStatus(models.Model): class PendingNotification(models.Model): - history = models.ForeignKey(PatchHistory, blank=False, null=False, on_delete=models.CASCADE) + history = models.ForeignKey( + PatchHistory, blank=False, null=False, on_delete=models.CASCADE + ) user = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE) + + +class CfbotBranch(models.Model): + STATUS_CHOICES = [ + ("testing", "Testing"), + ("finished", "Finished"), + ("failed", "Failed"), + ("timeout", "Timeout"), + ] + + patch = models.OneToOneField( + Patch, on_delete=models.CASCADE, related_name="cfbot_branch", primary_key=True + ) + branch_id = models.IntegerField(null=False) + branch_name = models.TextField(null=False) + commit_id = models.TextField(null=True, blank=True) + apply_url = models.TextField(null=False) + # Actually a postgres enum column + status = models.TextField(choices=STATUS_CHOICES, null=False) + needs_rebase_since = models.DateTimeField(null=True, blank=True) + created = models.DateTimeField(auto_now_add=True) + modified = models.DateTimeField(auto_now=True) + version = models.TextField(null=True, blank=True) + patch_count = models.IntegerField(null=True, blank=True) + first_additions = models.IntegerField(null=True, blank=True) + first_deletions = models.IntegerField(null=True, blank=True) + all_additions = models.IntegerField(null=True, blank=True) + all_deletions = 
models.IntegerField(null=True, blank=True) + + def save(self, *args, **kwargs): + """Only used by the admin panel to save empty commit id as NULL + + The actual cfbot webhook doesn't use the django ORM to save the data. + """ + + if not self.commit_id: + self.commit_id = None + super(CfbotBranch, self).save(*args, **kwargs) + + +class CfbotTask(models.Model): + STATUS_CHOICES = [ + ("CREATED", "Created"), + ("NEEDS_APPROVAL", "Needs Approval"), + ("TRIGGERED", "Triggered"), + ("EXECUTING", "Executing"), + ("FAILED", "Failed"), + ("COMPLETED", "Completed"), + ("SCHEDULED", "Scheduled"), + ("ABORTED", "Aborted"), + ("ERRORED", "Errored"), + ] + + # This id is only used by Django. Using text type for primary keys, has + # historically caused problems. + id = models.BigAutoField(primary_key=True) + # This is the id used by the external CI system. Currently with CirrusCI + # this is an integer, and thus we could probably store it as such. But + # given that we might need to change CI providers at some point, and that + # CI provider might use e.g. UUIDs, we prefer to consider the format of the + # ID opaque and store it as text. 
+ task_id = models.TextField(unique=True) + task_name = models.TextField(null=False) + patch = models.ForeignKey( + Patch, on_delete=models.CASCADE, related_name="cfbot_tasks" + ) + branch_id = models.IntegerField(null=False) + position = models.IntegerField(null=False) + # Actually a postgres enum column + status = models.TextField(choices=STATUS_CHOICES, null=False) + created = models.DateTimeField(auto_now_add=True) + modified = models.DateTimeField(auto_now=True) diff --git a/pgcommitfest/commitfest/reports.py b/pgcommitfest/commitfest/reports.py index 88f51a9f..e4191e16 100644 --- a/pgcommitfest/commitfest/reports.py +++ b/pgcommitfest/commitfest/reports.py @@ -1,8 +1,7 @@ -from django.shortcuts import render, get_object_or_404 -from django.http import Http404 -from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.db import connection +from django.http import Http404 +from django.shortcuts import get_object_or_404, render from .models import CommitFest @@ -14,7 +13,8 @@ def authorstats(request, cfid): raise Http404("Only CF Managers can do that.") cursor = connection.cursor() - cursor.execute(""" + cursor.execute( + """ WITH patches(id,name) AS ( SELECT p.id, name FROM commitfest_patch p @@ -37,13 +37,20 @@ def authorstats(request, cfid): INNER JOIN auth_user u ON u.id=COALESCE(authors.userid, reviewers.userid) ORDER BY last_name, first_name """, - { - 'cid': cf.id, - }) + { + "cid": cf.id, + }, + ) - return render(request, 'report_authors.html', { - 'cf': cf, - 'report': cursor.fetchall(), - 'title': 'Author stats', - 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, ], - }) + return render( + request, + "report_authors.html", + { + "cf": cf, + "report": cursor.fetchall(), + "title": "Author stats", + "breadcrumbs": [ + {"title": cf.title, "href": "/%s/" % cf.pk}, + ], + }, + ) diff --git a/pgcommitfest/commitfest/templates/base.html b/pgcommitfest/commitfest/templates/base.html index 
4a6ba990..382c43bc 100644 --- a/pgcommitfest/commitfest/templates/base.html +++ b/pgcommitfest/commitfest/templates/base.html @@ -6,7 +6,7 @@ - + {%block extrahead%}{%endblock%} {%if rss_alternate%} {%endif%} @@ -43,6 +43,6 @@

{{title}}

- + {%block morescript%}{%endblock%} diff --git a/pgcommitfest/commitfest/templates/base_form.html b/pgcommitfest/commitfest/templates/base_form.html index 91f2e91e..3bfaca85 100644 --- a/pgcommitfest/commitfest/templates/base_form.html +++ b/pgcommitfest/commitfest/templates/base_form.html @@ -110,20 +110,5 @@

Search user

$('#searchUserSearchField').focus(); }); {%endif%} - -/* Build our button callbacks */ -$(document).ready(function() { - $('button.attachThreadButton').each(function (i,o) { - var b = $(o); - b.click(function() { - $('#attachThreadAttachOnly').val('1'); - browseThreads(function(msgid) { - b.prev().val(msgid); - return true; - }); - return false; - }); - }); -}); {%endblock%} diff --git a/pgcommitfest/commitfest/templates/commitfest.html b/pgcommitfest/commitfest/templates/commitfest.html index 63f793a6..aee8af9f 100644 --- a/pgcommitfest/commitfest/templates/commitfest.html +++ b/pgcommitfest/commitfest/templates/commitfest.html @@ -60,15 +60,18 @@

{{p.is_open|yesno:"Active patches,Closed patches"}}

- + + + + - - - + + + {%if user.is_staff%} {%endif%} @@ -79,13 +82,43 @@

{{p.is_open|yesno:"Active patches,Closed patches"}}

{%if grouping%} {%ifchanged p.topic%} - -{%endifchanged%} + +{%endifchanged%} {%endif%} - + + + + diff --git a/pgcommitfest/commitfest/templates/mail/patch_notify.txt b/pgcommitfest/commitfest/templates/mail/patch_notify.txt index 1ab838c4..5f3d7443 100644 --- a/pgcommitfest/commitfest/templates/mail/patch_notify.txt +++ b/pgcommitfest/commitfest/templates/mail/patch_notify.txt @@ -5,7 +5,7 @@ have received updates in the PostgreSQL commitfest app: {{p.patch.name}} https://commitfest.postgresql.org/{{p.patch.patchoncommitfest_set.all.0.commitfest.id}}/{{p.patch.id}}/ {%for h in p.entries%} -* {{h.what}} ({{h.by}}){%endfor%} +* {{h.what}} by {{h.by_string()}}{%endfor%} {%endfor%} diff --git a/pgcommitfest/commitfest/templates/patch.html b/pgcommitfest/commitfest/templates/patch.html index 33670cf2..5a2da17c 100644 --- a/pgcommitfest/commitfest/templates/patch.html +++ b/pgcommitfest/commitfest/templates/patch.html @@ -12,6 +12,57 @@ + + + + + + + @@ -31,7 +82,7 @@ @@ -55,21 +106,11 @@ - - - - - - {%for h in patch.history %} + {%for h in patch.history %} @@ -156,7 +197,9 @@

Annotations

{%if p.is_open%}Patch{%if sortkey == 0%}
{%endif%}{%endif%}
Patch{%if sortkey == 5%}
{%endif%}
ID{%if sortkey == 4%}
{%endif%}
Status VerCI statusStats{%if sortkey == 6%}
{%endif%}
Author Reviewers Committer{%if p.is_open%}Num cfs{%if sortkey == 3%}
{%endif%}{%else%}Num cfs{%endif%}
{%if p.is_open%}Latest activity{%if sortkey == 1%}
{%endif%}{%else%}Latest activity{%endif%}
{%if p.is_open%}Latest mail{%if sortkey == 2%}
{%endif%}{%else%}Latest mail{%endif%}
Num cfs{%if sortkey == 3%}
{%endif%}
Latest activity{%if sortkey == 1%}
{%endif%}
Latest mail{%if sortkey == 2%}
{%endif%}
Select
{{p.topic}}
{{p.topic}}
{{p.name}}{{p.name}}{{p.id}} {{p.status|patchstatusstring}} {%if p.targetversion%}{{p.targetversion}}{%endif%} + {%if not p.cfbot_results %} + Not processed + {%elif p.cfbot_results.needs_rebase %} + + Needs rebase! + + {%else%} + + + {%if p.cfbot_results.failed > 0 %} + + {%elif p.cfbot_results.completed < p.cfbot_results.total %} + + {%else%} + + {%endif%} + + {{p.cfbot_results.completed}}/{{p.cfbot_results.total}} + + + {%endif%} + + {%if p.cfbot_results and p.cfbot_results.all_additions is not none %} + +{{ p.cfbot_results.all_additions }}−{{ p.cfbot_results.all_deletions }} + {%endif%} + {{p.author_names|default:''}} {{p.reviewer_names|default:''}} {{p.committer|default:''}}Title {{patch.name}}
CI (CFBot) + {%if not cfbot_branch %} + Not processed + {%elif not cfbot_branch.commit_id %} + + Needs rebase! + Additional links previous successfully applied patch (outdated): + + + Summary + {%else%} + + + Summary + {%for c in cfbot_tasks %} + {%if c.status == 'COMPLETED'%} + + {%elif c.status == 'CREATED' or c.status == 'SCHEDULED' %} + + {%elif c.status == 'EXECUTING' %} + + {%else %} + + {%endif%} + {%endfor%} + {%endif%} + {%if cfbot_branch %} + + {%endif%} + +
Stats (from CFBot) + {%if cfbot_branch and cfbot_branch.commit_id %} + {%if cfbot_branch.version %} + Patch version: {{ cfbot_branch.version }}, + {%endif%} + Patch count: {{ cfbot_branch.patch_count }}, + First patch: +{{ cfbot_branch.first_additions }}−{{ cfbot_branch.first_deletions }}, + All patches: +{{ cfbot_branch.all_additions }}−{{ cfbot_branch.all_deletions }} + {%else%} + Unknown + {%endif%} +
Topic {{patch.topic}}
Status {%for c in patch_commitfests %} -
{{c.commitfest}}: {{c.statusstring}}
+
{{c.commitfest}}: {{c.statusstring}}
{%endfor%}
LinksCFbot results (CirrusCI) - CFbot GitHub{%if patch.wikilink%} + {%if patch.wikilink%} Wiki{%endif%}{%if patch.gitlink%} Git {%endif%}
Checkout latest CFbot patchset - Go to your local checkout of the PostgreSQL repository and run: -
git remote add commitfest https://github.com/postgresql-cfbot/postgresql.git
-git fetch commitfest cf/{{patch.id}}
-git checkout commitfest/cf/{{patch.id}}
-
Emails @@ -138,7 +179,7 @@

Annotations

{{h.date}} {{h.by_string}}
+
{%include "patch_commands.inc"%} +
{%comment%}commit dialog{%endcomment%} @@ -37,4 +37,4 @@ {%endif%} - \ No newline at end of file + diff --git a/pgcommitfest/commitfest/templatetags/commitfest.py b/pgcommitfest/commitfest/templatetags/commitfest.py index c51765d2..f24dbfd8 100644 --- a/pgcommitfest/commitfest/templatetags/commitfest.py +++ b/pgcommitfest/commitfest/templatetags/commitfest.py @@ -1,19 +1,21 @@ -from django.template.defaultfilters import stringfilter from django import template +from django.template.defaultfilters import stringfilter + +from uuid import uuid4 from pgcommitfest.commitfest.models import PatchOnCommitFest register = template.Library() -@register.filter(name='patchstatusstring') +@register.filter(name="patchstatusstring") @stringfilter def patchstatusstring(value): i = int(value) return [v for k, v in PatchOnCommitFest._STATUS_CHOICES if k == i][0] -@register.filter(name='patchstatuslabel') +@register.filter(name="patchstatuslabel") @stringfilter def patchstatuslabel(value): i = int(value) @@ -22,7 +24,7 @@ def patchstatuslabel(value): @register.filter(is_safe=True) def label_class(value, arg): - return value.label_tag(attrs={'class': arg}) + return value.label_tag(attrs={"class": arg}) @register.filter(is_safe=True) @@ -30,20 +32,32 @@ def field_class(value, arg): return value.as_widget(attrs={"class": arg}) -@register.filter(name='alertmap') +@register.filter(name="alertmap") @stringfilter def alertmap(value): - if value == 'error': - return 'alert-danger' - elif value == 'warning': - return 'alert-warning' - elif value == 'success': - return 'alert-success' + if value == "error": + return "alert-danger" + elif value == "warning": + return "alert-warning" + elif value == "success": + return "alert-success" else: - return 'alert-info' + return "alert-info" + + +# Generate a GET parameter that's unique per startup of the python process to +# bust the cache of the client, so that it pulls in possibly updated JS/CSS +# files. 
+STATIC_FILE_PARAM = f"v={uuid4()}" + + +# This GET parameter should be added to every one of our static files. +@register.simple_tag +def static_file_param(): + return STATIC_FILE_PARAM -@register.filter(name='hidemail') +@register.filter(name="hidemail") @stringfilter def hidemail(value): - return value.replace('@', ' at ') + return value.replace("@", " at ") diff --git a/pgcommitfest/commitfest/util.py b/pgcommitfest/commitfest/util.py index 94ad3e5f..10f9c816 100644 --- a/pgcommitfest/commitfest/util.py +++ b/pgcommitfest/commitfest/util.py @@ -1,5 +1,5 @@ -from django.forms.models import model_to_dict import django.db.models.fields.related +from django.forms.models import model_to_dict class DiffableModel(object): @@ -19,12 +19,15 @@ def diff(self): diffs = dict([(k, (v, d2[k])) for k, v in d1.items() if v != d2[k]]) # Foreign key lookups for k, v in diffs.items(): - if type(self._meta.get_field(k)) is django.db.models.fields.related.ForeignKey: + if ( + type(self._meta.get_field(k)) + is django.db.models.fields.related.ForeignKey + ): # If it's a foreign key, look up the name again on ourselves. # Since we only care about the *new* value, it's easy enough. 
diffs[k] = (v[0], getattr(self, k)) # Many to many lookups - if hasattr(self, 'map_manytomany_for_diff'): + if hasattr(self, "map_manytomany_for_diff"): for k, v in diffs.items(): if k in manytomanyfieldnames and k in self.map_manytomany_for_diff: # Try to show the display name instead here diff --git a/pgcommitfest/commitfest/views.py b/pgcommitfest/commitfest/views.py index 2c21cd3d..a3291f4d 100644 --- a/pgcommitfest/commitfest/views.py +++ b/pgcommitfest/commitfest/views.py @@ -1,45 +1,67 @@ -from django.shortcuts import render, get_object_or_404 -from django.http import HttpResponse, HttpResponseRedirect -from django.http import Http404, HttpResponseForbidden -from django.views.decorators.csrf import csrf_exempt -from django.db import transaction, connection -from django.db.models import Q +from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User +from django.db import connection, transaction +from django.db.models import Q +from django.http import ( + Http404, + HttpResponse, + HttpResponseForbidden, + HttpResponseRedirect, +) +from django.shortcuts import get_object_or_404, render +from django.views.decorators.csrf import csrf_exempt -from django.conf import settings - +import hmac +import json +import urllib from datetime import datetime from email.mime.text import MIMEText from email.utils import formatdate, make_msgid -import json -import urllib from pgcommitfest.mailqueue.util import send_mail, send_simple_mail from pgcommitfest.userprofile.util import UserWrapper -from .models import CommitFest, Patch, PatchOnCommitFest, PatchHistory, Committer -from .models import MailThread -from .forms import PatchForm, NewPatchForm, CommentForm, CommitFestFilterForm -from .forms import BulkEmailForm -from .ajax import doAttachThread, refresh_single_thread, _archivesAPI +from .ajax import _archivesAPI, doAttachThread, refresh_single_thread from .feeds 
import ActivityFeed +from .forms import ( + BulkEmailForm, + CommentForm, + CommitFestFilterForm, + NewPatchForm, + PatchForm, +) +from .models import ( + CfbotBranch, + CommitFest, + Committer, + MailThread, + Patch, + PatchHistory, + PatchOnCommitFest, +) def home(request): commitfests = list(CommitFest.objects.all()) opencf = next((c for c in commitfests if c.status == CommitFest.STATUS_OPEN), None) - inprogresscf = next((c for c in commitfests if c.status == CommitFest.STATUS_INPROGRESS), None) - - return render(request, 'home.html', { - 'commitfests': commitfests, - 'opencf': opencf, - 'inprogresscf': inprogresscf, - 'title': 'Commitfests', - 'header_activity': 'Activity log', - 'header_activity_link': '/activity/', - }) + inprogresscf = next( + (c for c in commitfests if c.status == CommitFest.STATUS_INPROGRESS), None + ) + + return render( + request, + "home.html", + { + "commitfests": commitfests, + "opencf": opencf, + "inprogresscf": inprogresscf, + "title": "Commitfests", + "header_activity": "Activity log", + "header_activity_link": "/activity/", + }, + ) def activity(request, cfid=None, rss=None): @@ -56,12 +78,16 @@ def activity(request, cfid=None, rss=None): # we're evil. And also because the number has been verified # when looking up the cf itself, so nothing can be injected # there. 
- where = 'WHERE EXISTS (SELECT 1 FROM commitfest_patchoncommitfest poc2 WHERE poc2.patch_id=p.id AND poc2.commitfest_id={0})'.format(cf.id) + where = "WHERE EXISTS (SELECT 1 FROM commitfest_patchoncommitfest poc2 WHERE poc2.patch_id=p.id AND poc2.commitfest_id={0})".format( + cf.id + ) else: cf = None - where = '' + where = "" - sql = "SELECT ph.date, auth_user.username AS by, ph.what, p.id AS patchid, p.name, (SELECT max(commitfest_id) FROM commitfest_patchoncommitfest poc WHERE poc.patch_id=p.id) AS cfid FROM commitfest_patchhistory ph INNER JOIN commitfest_patch p ON ph.patch_id=p.id INNER JOIN auth_user on auth_user.id=ph.by_id {0} ORDER BY ph.date DESC LIMIT {1}".format(where, num) + sql = "SELECT ph.date, auth_user.username AS by, ph.what, p.id AS patchid, p.name, (SELECT max(commitfest_id) FROM commitfest_patchoncommitfest poc WHERE poc.patch_id=p.id) AS cfid FROM commitfest_patchhistory ph INNER JOIN commitfest_patch p ON ph.patch_id=p.id INNER JOIN auth_user on auth_user.id=ph.by_id {0} ORDER BY ph.date DESC LIMIT {1}".format( + where, num + ) curs = connection.cursor() curs.execute(sql) @@ -72,22 +98,32 @@ def activity(request, cfid=None, rss=None): return ActivityFeed(activity, cf)(request) else: # Return regular webpage - return render(request, 'activity.html', { - 'commitfest': cf, - 'activity': activity, - 'title': cf and 'Commitfest activity' or 'Global Commitfest activity', - 'rss_alternate': cf and '/{0}/activity.rss/'.format(cf.id) or '/activity.rss/', - 'rss_alternate_title': 'PostgreSQL Commitfest Activity Log', - 'breadcrumbs': cf and [{'title': cf.title, 'href': '/%s/' % cf.pk}, ] or None, - }) + return render( + request, + "activity.html", + { + "commitfest": cf, + "activity": activity, + "title": cf and "Commitfest activity" or "Global Commitfest activity", + "rss_alternate": cf + and "/{0}/activity.rss/".format(cf.id) + or "/activity.rss/", + "rss_alternate_title": "PostgreSQL Commitfest Activity Log", + "breadcrumbs": cf + and [ + 
{"title": cf.title, "href": "/%s/" % cf.pk}, + ] + or None, + }, + ) def redir(request, what, end): - if what == 'open': + if what == "open": cfs = list(CommitFest.objects.filter(status=CommitFest.STATUS_OPEN)) - elif what == 'inprogress': + elif what == "inprogress": cfs = list(CommitFest.objects.filter(status=CommitFest.STATUS_INPROGRESS)) - elif what == 'current': + elif what == "current": cfs = list(CommitFest.objects.filter(status=CommitFest.STATUS_INPROGRESS)) if len(cfs) == 0: cfs = list(CommitFest.objects.filter(status=CommitFest.STATUS_OPEN)) @@ -95,15 +131,22 @@ def redir(request, what, end): raise Http404() if len(cfs) == 0: - messages.warning(request, "No {0} commitfests exist, redirecting to startpage.".format(what)) + messages.warning( + request, "No {0} commitfests exist, redirecting to startpage.".format(what) + ) return HttpResponseRedirect("/") if len(cfs) != 1: - messages.warning(request, "More than one {0} commitfest exists, redirecting to startpage instead.".format(what)) + messages.warning( + request, + "More than one {0} commitfest exists, redirecting to startpage instead.".format( + what + ), + ) return HttpResponseRedirect("/") query_string = request.GET.urlencode() if query_string: - query_string = '?' + query_string + query_string = "?" 
+ query_string return HttpResponseRedirect(f"/{cfs[0].id}/{end}{query_string}") @@ -114,146 +157,226 @@ def commitfest(request, cfid): # Build a dynamic filter based on the filtering options entered whereclauses = [] whereparams = {} - if request.GET.get('status', '-1') != '-1': + if request.GET.get("status", "-1") != "-1": try: - whereparams['status'] = int(request.GET['status']) + whereparams["status"] = int(request.GET["status"]) whereclauses.append("poc.status=%(status)s") except ValueError: # int() failed -- so just ignore this filter pass - if request.GET.get('targetversion', '-1') != '-1': - if request.GET['targetversion'] == '-2': + if request.GET.get("targetversion", "-1") != "-1": + if request.GET["targetversion"] == "-2": whereclauses.append("targetversion_id IS NULL") else: try: - whereparams['verid'] = int(request.GET['targetversion']) + whereparams["verid"] = int(request.GET["targetversion"]) whereclauses.append("targetversion_id=%(verid)s") except ValueError: # int() failed, ignore pass - if request.GET.get('author', '-1') != '-1': - if request.GET['author'] == '-2': - whereclauses.append("NOT EXISTS (SELECT 1 FROM commitfest_patch_authors cpa WHERE cpa.patch_id=p.id)") - elif request.GET['author'] == '-3': + if request.GET.get("author", "-1") != "-1": + if request.GET["author"] == "-2": + whereclauses.append( + "NOT EXISTS (SELECT 1 FROM commitfest_patch_authors cpa WHERE cpa.patch_id=p.id)" + ) + elif request.GET["author"] == "-3": # Checking for "yourself" requires the user to be logged in! 
if not request.user.is_authenticated: - return HttpResponseRedirect('%s?next=%s' % (settings.LOGIN_URL, request.path)) - whereclauses.append("EXISTS (SELECT 1 FROM commitfest_patch_authors cpa WHERE cpa.patch_id=p.id AND cpa.user_id=%(self)s)") - whereparams['self'] = request.user.id + return HttpResponseRedirect( + "%s?next=%s" % (settings.LOGIN_URL, request.path) + ) + whereclauses.append( + "EXISTS (SELECT 1 FROM commitfest_patch_authors cpa WHERE cpa.patch_id=p.id AND cpa.user_id=%(self)s)" + ) + whereparams["self"] = request.user.id else: try: - whereparams['author'] = int(request.GET['author']) - whereclauses.append("EXISTS (SELECT 1 FROM commitfest_patch_authors cpa WHERE cpa.patch_id=p.id AND cpa.user_id=%(author)s)") + whereparams["author"] = int(request.GET["author"]) + whereclauses.append( + "EXISTS (SELECT 1 FROM commitfest_patch_authors cpa WHERE cpa.patch_id=p.id AND cpa.user_id=%(author)s)" + ) except ValueError: # int() failed -- so just ignore this filter pass - if request.GET.get('reviewer', '-1') != '-1': - if request.GET['reviewer'] == '-2': - whereclauses.append("NOT EXISTS (SELECT 1 FROM commitfest_patch_reviewers cpr WHERE cpr.patch_id=p.id)") - elif request.GET['reviewer'] == '-3': + if request.GET.get("reviewer", "-1") != "-1": + if request.GET["reviewer"] == "-2": + whereclauses.append( + "NOT EXISTS (SELECT 1 FROM commitfest_patch_reviewers cpr WHERE cpr.patch_id=p.id)" + ) + elif request.GET["reviewer"] == "-3": # Checking for "yourself" requires the user to be logged in! 
if not request.user.is_authenticated: - return HttpResponseRedirect('%s?next=%s' % (settings.LOGIN_URL, request.path)) - whereclauses.append("EXISTS (SELECT 1 FROM commitfest_patch_reviewers cpr WHERE cpr.patch_id=p.id AND cpr.user_id=%(self)s)") - whereparams['self'] = request.user.id + return HttpResponseRedirect( + "%s?next=%s" % (settings.LOGIN_URL, request.path) + ) + whereclauses.append( + "EXISTS (SELECT 1 FROM commitfest_patch_reviewers cpr WHERE cpr.patch_id=p.id AND cpr.user_id=%(self)s)" + ) + whereparams["self"] = request.user.id else: try: - whereparams['reviewer'] = int(request.GET['reviewer']) - whereclauses.append("EXISTS (SELECT 1 FROM commitfest_patch_reviewers cpr WHERE cpr.patch_id=p.id AND cpr.user_id=%(reviewer)s)") + whereparams["reviewer"] = int(request.GET["reviewer"]) + whereclauses.append( + "EXISTS (SELECT 1 FROM commitfest_patch_reviewers cpr WHERE cpr.patch_id=p.id AND cpr.user_id=%(reviewer)s)" + ) except ValueError: # int() failed -- so just ignore this filter pass - if request.GET.get('text', '') != '': + if request.GET.get("text", "") != "": whereclauses.append("p.name ILIKE '%%' || %(txt)s || '%%'") - whereparams['txt'] = request.GET['text'] + whereparams["txt"] = request.GET["text"] has_filter = len(whereclauses) > 0 # Figure out custom ordering - if request.GET.get('sortkey', '') != '': + if request.GET.get("sortkey", "") != "": try: - sortkey = int(request.GET['sortkey']) + sortkey = int(request.GET["sortkey"]) except ValueError: sortkey = 0 if sortkey == 1: - orderby_str = 'modified, created' + orderby_str = "modified, created" + elif sortkey == -1: + orderby_str = "modified DESC, created DESC" elif sortkey == 2: - orderby_str = 'lastmail, created' + orderby_str = "lastmail, created" + elif sortkey == -2: + orderby_str = "lastmail DESC, created DESC" elif sortkey == 3: - orderby_str = 'num_cfs DESC, modified, created' + orderby_str = "num_cfs DESC, modified, created" + elif sortkey == -3: + orderby_str = "num_cfs ASC, modified 
DESC, created DESC" + elif sortkey == 4: + orderby_str = "p.id" + elif sortkey == -4: + orderby_str = "p.id DESC" + elif sortkey == 5: + orderby_str = "p.name, created" + elif sortkey == -5: + orderby_str = "p.name DESC, created DESC" + elif sortkey == 6: + orderby_str = ( + "branch.all_additions + branch.all_deletions NULLS LAST, created" + ) + elif sortkey == -6: + orderby_str = ( + "branch.all_additions + branch.all_deletions DESC NULLS LAST, created" + ) else: - orderby_str = 'p.id' + orderby_str = "p.id" sortkey = 0 else: - orderby_str = 'topic, created' + orderby_str = "topic, created" sortkey = 0 if not has_filter and sortkey == 0 and request.GET: # Redirect to get rid of the ugly url - return HttpResponseRedirect('/%s/' % cf.id) + return HttpResponseRedirect("/%s/" % cf.id) if whereclauses: - where_str = 'AND ({0})'.format(' AND '.join(whereclauses)) + where_str = "AND ({0})".format(" AND ".join(whereclauses)) else: - where_str = '' + where_str = "" params = { - 'cid': cf.id, - 'openstatuses': PatchOnCommitFest.OPEN_STATUSES, + "cid": cf.id, + "openstatuses": PatchOnCommitFest.OPEN_STATUSES, } params.update(whereparams) # Let's not overload the poor django ORM curs = connection.cursor() - curs.execute("""SELECT p.id, p.name, poc.status, v.version AS targetversion, p.created, p.modified, p.lastmail, committer.username AS committer, t.topic, + curs.execute( + """SELECT p.id, p.name, poc.status, v.version AS targetversion, p.created, p.modified, p.lastmail, committer.username AS committer, t.topic, (poc.status=ANY(%(openstatuses)s)) AS is_open, (SELECT string_agg(first_name || ' ' || last_name || ' (' || username || ')', ', ') FROM auth_user INNER JOIN commitfest_patch_authors cpa ON cpa.user_id=auth_user.id WHERE cpa.patch_id=p.id) AS author_names, (SELECT string_agg(first_name || ' ' || last_name || ' (' || username || ')', ', ') FROM auth_user INNER JOIN commitfest_patch_reviewers cpr ON cpr.user_id=auth_user.id WHERE cpr.patch_id=p.id) AS reviewer_names, 
-(SELECT count(1) FROM commitfest_patchoncommitfest pcf WHERE pcf.patch_id=p.id) AS num_cfs +(SELECT count(1) FROM commitfest_patchoncommitfest pcf WHERE pcf.patch_id=p.id) AS num_cfs, +( + SELECT row_to_json(t) as cfbot_results + from ( + SELECT + count(*) FILTER (WHERE task.status = 'COMPLETED') as completed, + count(*) FILTER (WHERE task.status in ('CREATED', 'SCHEDULED', 'EXECUTING')) running, + count(*) FILTER (WHERE task.status in ('ABORTED', 'ERRORED', 'FAILED')) failed, + count(*) total, + string_agg(task.task_name, ', ') FILTER (WHERE task.status in ('ABORTED', 'ERRORED', 'FAILED')) as failed_task_names, + branch.commit_id IS NULL as needs_rebase, + branch.apply_url, + branch.patch_count, + branch.first_additions, + branch.first_deletions, + branch.all_additions, + branch.all_deletions + FROM commitfest_cfbotbranch branch + LEFT JOIN commitfest_cfbottask task ON task.branch_id = branch.branch_id + WHERE branch.patch_id=p.id + GROUP BY branch.patch_id + ) t +) FROM commitfest_patch p INNER JOIN commitfest_patchoncommitfest poc ON poc.patch_id=p.id INNER JOIN commitfest_topic t ON t.id=p.topic_id LEFT JOIN auth_user committer ON committer.id=p.committer_id LEFT JOIN commitfest_targetversion v ON p.targetversion_id=v.id +LEFT JOIN commitfest_cfbotbranch branch ON branch.patch_id=p.id WHERE poc.commitfest_id=%(cid)s {0} -GROUP BY p.id, poc.id, committer.id, t.id, v.version -ORDER BY is_open DESC, {1}""".format(where_str, orderby_str), params) - patches = [dict(zip([col[0] for col in curs.description], row)) for row in curs.fetchall()] +GROUP BY p.id, poc.id, committer.id, t.id, v.version, branch.patch_id +ORDER BY is_open DESC, {1}""".format(where_str, orderby_str), + params, + ) + patches = [ + dict(zip([col[0] for col in curs.description], row)) for row in curs.fetchall() + ] # Generate patch status summary. 
curs = connection.cursor() - curs.execute("SELECT ps.status, ps.statusstring, count(*) FROM commitfest_patchoncommitfest poc INNER JOIN commitfest_patchstatus ps ON ps.status=poc.status WHERE commitfest_id=%(id)s GROUP BY ps.status ORDER BY ps.sortkey", { - 'id': cf.id, - }) + curs.execute( + "SELECT ps.status, ps.statusstring, count(*) FROM commitfest_patchoncommitfest poc INNER JOIN commitfest_patchstatus ps ON ps.status=poc.status WHERE commitfest_id=%(id)s GROUP BY ps.status ORDER BY ps.sortkey", + { + "id": cf.id, + }, + ) statussummary = curs.fetchall() - statussummary.append([-1, 'Total', sum((r[2] for r in statussummary))]) + statussummary.append([-1, "Total", sum((r[2] for r in statussummary))]) # Generates a fairly expensive query, which we shouldn't do unless # the user is logged in. XXX: Figure out how to avoid doing that.. form = CommitFestFilterForm(cf, request.GET) - return render(request, 'commitfest.html', { - 'cf': cf, - 'form': form, - 'patches': patches, - 'statussummary': statussummary, - 'has_filter': has_filter, - 'title': cf.title, - 'grouping': sortkey == 0, - 'sortkey': sortkey, - 'openpatchids': [p['id'] for p in patches if p['is_open']], - 'header_activity': 'Activity log', - 'header_activity_link': 'activity/', - }) + return render( + request, + "commitfest.html", + { + "cf": cf, + "form": form, + "patches": patches, + "statussummary": statussummary, + "has_filter": has_filter, + "title": cf.title, + "grouping": sortkey == 0, + "sortkey": sortkey, + "openpatchids": [p["id"] for p in patches if p["is_open"]], + "header_activity": "Activity log", + "header_activity_link": "activity/", + }, + ) def patches_by_messageid(messageid): # First try to find the messageid in our database - patches = Patch.objects.select_related().filter(mailthread__messageid=messageid).order_by('created', ).all() + patches = ( + Patch.objects.select_related() + .filter(mailthread_set__messageid=messageid) + .order_by( + "created", + ) + .all() + ) if patches: 
return patches @@ -261,55 +384,84 @@ def patches_by_messageid(messageid): # If it's not there, try to find it in the archives try: - thread = _archivesAPI(f'/message-id.json/{urlsafe_messageid}') + thread = _archivesAPI(f"/message-id.json/{urlsafe_messageid}") except Http404: return [] if len(thread) == 0: return [] - first_email = min(thread, key=lambda x: x['date']) + first_email = min(thread, key=lambda x: x["date"]) - return Patch.objects.select_related().filter(mailthread__messageid=first_email['msgid']).order_by('created',).all() + return ( + Patch.objects.select_related() + .filter(mailthread_set__messageid=first_email["msgid"]) + .order_by( + "created", + ) + .all() + ) def global_search(request): - if 'searchterm' not in request.GET: - return HttpResponseRedirect('/') - searchterm = request.GET['searchterm'].strip() + if "searchterm" not in request.GET: + return HttpResponseRedirect("/") + searchterm = request.GET["searchterm"].strip() patches = [] - if '@' in searchterm: + if "@" in searchterm: # This is probably a messageid, so let's try to look up patches related # to it. Let's first remove any < and > around it though. 
- cleaned_id = searchterm.removeprefix('<').removesuffix('>') + cleaned_id = searchterm.removeprefix("<").removesuffix(">") patches = patches_by_messageid(cleaned_id) if not patches: - patches = Patch.objects.select_related().filter(name__icontains=searchterm).order_by('created',).all() + patches = ( + Patch.objects.select_related() + .filter(name__icontains=searchterm) + .order_by( + "created", + ) + .all() + ) if len(patches) == 1: patch = patches[0] - return HttpResponseRedirect(f'/patch/{patch.id}/') + return HttpResponseRedirect(f"/patch/{patch.id}/") - return render(request, 'patchsearch.html', { - 'patches': patches, - 'title': 'Patch search results', - }) + return render( + request, + "patchsearch.html", + { + "patches": patches, + "title": "Patch search results", + }, + ) -def patch_redirect(request, patchid): - last_commitfest = PatchOnCommitFest.objects.select_related('commitfest').filter(patch_id=patchid).order_by('-commitfest__startdate').first() - if not last_commitfest: - raise Http404("Patch not found") - return HttpResponseRedirect(f'/{last_commitfest.commitfest_id}/{patchid}/') +def patch_legacy_redirect(request, cfid, patchid): + # Previously we would include the commitfest id in the URL. This is no + # longer the case. 
+ return HttpResponseRedirect(f"/patch/{patchid}/") -def patch(request, cfid, patchid): - cf = get_object_or_404(CommitFest, pk=cfid) - patch = get_object_or_404(Patch.objects.select_related(), pk=patchid, commitfests=cf) - patch_commitfests = PatchOnCommitFest.objects.select_related('commitfest').filter(patch=patch).order_by('-commitfest__startdate') - committers = Committer.objects.filter(active=True).order_by('user__last_name', 'user__first_name') +def patch(request, patchid): + patch = get_object_or_404(Patch.objects.select_related(), pk=patchid) + + patch_commitfests = ( + PatchOnCommitFest.objects.select_related("commitfest") + .filter(patch=patch) + .order_by("-commitfest__startdate") + .all() + ) + cf = patch_commitfests[0].commitfest + + committers = Committer.objects.filter(active=True).order_by( + "user__last_name", "user__first_name" + ) + + cfbot_branch = getattr(patch, "cfbot_branch", None) + cfbot_tasks = patch.cfbot_tasks.order_by("position") if cfbot_branch else [] # XXX: this creates a session, so find a smarter way. Probably handle # it in the callback and just ask the user then? 
@@ -329,32 +481,40 @@ def patch(request, cfid, patchid): is_reviewer = False is_subscribed = False - return render(request, 'patch.html', { - 'cf': cf, - 'patch': patch, - 'patch_commitfests': patch_commitfests, - 'is_committer': is_committer, - 'is_this_committer': is_this_committer, - 'is_reviewer': is_reviewer, - 'is_subscribed': is_subscribed, - 'committers': committers, - 'attachnow': 'attachthreadnow' in request.GET, - 'title': patch.name, - 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, ], - }) + return render( + request, + "patch.html", + { + "cf": cf, + "patch": patch, + "patch_commitfests": patch_commitfests, + "cfbot_branch": cfbot_branch, + "cfbot_tasks": cfbot_tasks, + "is_committer": is_committer, + "is_this_committer": is_this_committer, + "is_reviewer": is_reviewer, + "is_subscribed": is_subscribed, + "committers": committers, + "attachnow": "attachthreadnow" in request.GET, + "title": patch.name, + "breadcrumbs": [ + {"title": cf.title, "href": "/%s/" % cf.pk}, + ], + }, + ) @login_required @transaction.atomic -def patchform(request, cfid, patchid): - cf = get_object_or_404(CommitFest, pk=cfid) - patch = get_object_or_404(Patch, pk=patchid, commitfests=cf) +def patchform(request, patchid): + patch = get_object_or_404(Patch, pk=patchid) + cf = patch.current_commitfest() prevreviewers = list(patch.reviewers.all()) prevauthors = list(patch.authors.all()) prevcommitter = patch.committer - if request.method == 'POST': + if request.method == "POST": form = PatchForm(data=request.POST, instance=patch) if form.is_valid(): # Some fields need to be set when creating a new one @@ -365,23 +525,37 @@ def patchform(request, cfid, patchid): # Track all changes for field, values in r.diff.items(): - PatchHistory(patch=patch, by=request.user, what='Changed %s to %s' % (field, values[1])).save_and_notify(prevcommitter=prevcommitter, prevreviewers=prevreviewers, prevauthors=prevauthors) + PatchHistory( + patch=patch, + by=request.user, + what="Changed 
%s to %s" % (field, values[1]), + ).save_and_notify( + prevcommitter=prevcommitter, + prevreviewers=prevreviewers, + prevauthors=prevauthors, + ) r.set_modified() r.save() - return HttpResponseRedirect('../../%s/' % r.pk) + return HttpResponseRedirect("../../%s/" % r.pk) # Else fall through and render the page again else: form = PatchForm(instance=patch) - return render(request, 'base_form.html', { - 'cf': cf, - 'form': form, - 'patch': patch, - 'title': 'Edit patch', - 'selectize_multiple_fields': form.selectize_multiple_fields.items(), - 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, - {'title': 'View patch', 'href': '/%s/%s/' % (cf.pk, patch.pk)}], - }) + return render( + request, + "base_form.html", + { + "cf": cf, + "form": form, + "patch": patch, + "title": "Edit patch", + "selectize_multiple_fields": form.selectize_multiple_fields.items(), + "breadcrumbs": [ + {"title": cf.title, "href": "/%s/" % cf.pk}, + {"title": "View patch", "href": "/%s/%s/" % (cf.pk, patch.pk)}, + ], + }, + ) @login_required @@ -391,44 +565,62 @@ def newpatch(request, cfid): if not cf.status == CommitFest.STATUS_OPEN and not request.user.is_staff: raise Http404("This commitfest is not open!") - if request.method == 'POST': + if request.method == "POST": form = NewPatchForm(data=request.POST) if form.is_valid(): - patch = Patch(name=form.cleaned_data['name'], - topic=form.cleaned_data['topic']) + patch = Patch( + name=form.cleaned_data["name"], topic=form.cleaned_data["topic"] + ) patch.set_modified() patch.save() - poc = PatchOnCommitFest(patch=patch, commitfest=cf, enterdate=datetime.now()) + poc = PatchOnCommitFest( + patch=patch, commitfest=cf, enterdate=datetime.now() + ) poc.save() - PatchHistory(patch=patch, by=request.user, what='Created patch record').save() + PatchHistory( + patch=patch, by=request.user, what="Created patch record" + ).save() # Now add the thread try: - doAttachThread(cf, patch, form.cleaned_data['threadmsgid'], request.user) - return 
HttpResponseRedirect("/%s/%s/edit/" % (cf.id, patch.id)) + doAttachThread( + cf, patch, form.cleaned_data["threadmsgid"], request.user + ) + return HttpResponseRedirect("/patch/%s/" % (patch.id,)) except Http404: # Thread not found! # This is a horrible breakage of API layers - form._errors['threadmsgid'] = form.error_class(('Selected thread did not exist in the archives',)) + form._errors["threadmsgid"] = form.error_class( + ("Selected thread did not exist in the archives",) + ) except Exception: - form._errors['threadmsgid'] = form.error_class(('An error occurred looking up the thread in the archives.',)) + form._errors["threadmsgid"] = form.error_class( + ("An error occurred looking up the thread in the archives.",) + ) # In this case, we have created a patch - delete it. This causes a agp in id's, but it should # not happen very often. If we successfully attached to it, we will have already returned. patch.delete() else: - form = NewPatchForm() - - return render(request, 'base_form.html', { - 'form': form, - 'title': 'New patch', - 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, ], - 'savebutton': 'Create patch', - 'threadbrowse': True, - }) + form = NewPatchForm(request=request) + + return render( + request, + "base_form.html", + { + "form": form, + "title": "New patch", + "breadcrumbs": [ + {"title": cf.title, "href": "/%s/" % cf.pk}, + ], + "savebutton": "Create patch", + "selectize_multiple_fields": form.selectize_multiple_fields.items(), + "threadbrowse": True, + }, + ) def _review_status_string(reviewstatus): - if '0' in reviewstatus: - if '1' in reviewstatus: + if "0" in reviewstatus: + if "1" in reviewstatus: return "tested, passed" else: return "tested, failed" @@ -438,67 +630,81 @@ def _review_status_string(reviewstatus): @login_required @transaction.atomic -def comment(request, cfid, patchid, what): - cf = get_object_or_404(CommitFest, pk=cfid) +def comment(request, patchid, what): patch = get_object_or_404(Patch, pk=patchid) + cf = 
patch.current_commitfest() poc = get_object_or_404(PatchOnCommitFest, patch=patch, commitfest=cf) - is_review = (what == 'review') - - if poc.is_closed: - # We allow modification of patches in closed CFs *only* if it's the - # last CF that the patch is part of. If it's part of another CF, that - # is later than this one, tell the user to go there instead. - lastcf = PatchOnCommitFest.objects.filter(patch=patch).order_by('-commitfest__startdate')[0] - if poc != lastcf: - messages.add_message(request, messages.INFO, "The status of this patch cannot be changed in this commitfest. You must modify it in the one where it's open!") - return HttpResponseRedirect('..') - - if request.method == 'POST': + is_review = what == "review" + + if request.method == "POST": try: form = CommentForm(patch, poc, is_review, data=request.POST) except Exception as e: - messages.add_message(request, messages.ERROR, "Failed to build list of response options from the archives: %s" % e) - return HttpResponseRedirect('/%s/%s/' % (cf.id, patch.id)) + messages.add_message( + request, + messages.ERROR, + "Failed to build list of response options from the archives: %s" % e, + ) + return HttpResponseRedirect("/%s/%s/" % (cf.id, patch.id)) if form.is_valid(): if is_review: - txt = "The following review has been posted through the commitfest application:\n%s\n\n%s" % ( - "\n".join(["%-25s %s" % (f.label + ':', _review_status_string(form.cleaned_data[fn])) for (fn, f) in form.fields.items() if fn.startswith('review_')]), - form.cleaned_data['message'] + txt = ( + "The following review has been posted through the commitfest application:\n%s\n\n%s" + % ( + "\n".join( + [ + "%-25s %s" + % ( + f.label + ":", + _review_status_string(form.cleaned_data[fn]), + ) + for (fn, f) in form.fields.items() + if fn.startswith("review_") + ] + ), + form.cleaned_data["message"], + ) ) else: - txt = form.cleaned_data['message'] + txt = form.cleaned_data["message"] - if int(form.cleaned_data['newstatus']) != poc.status: - 
poc.status = int(form.cleaned_data['newstatus']) + if int(form.cleaned_data["newstatus"]) != poc.status: + poc.status = int(form.cleaned_data["newstatus"]) poc.save() - PatchHistory(patch=poc.patch, by=request.user, what='New status: %s' % poc.statusstring).save_and_notify() + PatchHistory( + patch=poc.patch, + by=request.user, + what="New status: %s" % poc.statusstring, + ).save_and_notify() txt += "\n\nThe new status of this patch is: %s\n" % poc.statusstring - msg = MIMEText(txt, _charset='utf-8') + msg = MIMEText(txt, _charset="utf-8") - if form.thread.subject.startswith('Re:'): - msg['Subject'] = form.thread.subject + if form.thread.subject.startswith("Re:"): + msg["Subject"] = form.thread.subject else: - msg['Subject'] = 'Re: %s' % form.thread.subject + msg["Subject"] = "Re: %s" % form.thread.subject - msg['To'] = settings.HACKERS_EMAIL - msg['From'] = UserWrapper(request.user).encoded_email_header + msg["To"] = settings.HACKERS_EMAIL + msg["From"] = UserWrapper(request.user).encoded_email_header # CC the authors of a patch, if there are any authors = list(patch.authors.all()) if len(authors): - msg['Cc'] = ", ".join([UserWrapper(a).encoded_email_header for a in authors]) + msg["Cc"] = ", ".join( + [UserWrapper(a).encoded_email_header for a in authors] + ) - msg['Date'] = formatdate(localtime=True) - msg['User-Agent'] = 'pgcommitfest' - msg['X-cfsender'] = request.user.username - msg['In-Reply-To'] = '<%s>' % form.respid + msg["Date"] = formatdate(localtime=True) + msg["User-Agent"] = "pgcommitfest" + msg["X-cfsender"] = request.user.username + msg["In-Reply-To"] = "<%s>" % form.respid # We just add the "top" messageid and the one we're responding to. # This along with in-reply-to should indicate clearly enough where # in the thread the message belongs. 
- msg['References'] = '<%s> <%s>' % (form.thread.messageid, form.respid) - msg['Message-ID'] = make_msgid('pgcf') + msg["References"] = "<%s> <%s>" % (form.thread.messageid, form.respid) + msg["Message-ID"] = make_msgid("pgcf") uw = UserWrapper(request.user) msgstring = msg.as_string() @@ -508,50 +714,67 @@ def comment(request, cfid, patchid, what): # make it deliver the email... send_mail(uw.email, UserWrapper(a).email, msgstring) - PatchHistory(patch=patch, by=request.user, what='Posted %s with messageid %s' % (what, msg['Message-ID'])).save() + PatchHistory( + patch=patch, + by=request.user, + what="Posted %s with messageid %s" % (what, msg["Message-ID"]), + ).save() - messages.add_message(request, messages.INFO, "Your email has been queued for %s, and will be sent within a few minutes." % (settings.HACKERS_EMAIL)) + messages.add_message( + request, + messages.INFO, + "Your email has been queued for %s, and will be sent within a few minutes." + % (settings.HACKERS_EMAIL), + ) - return HttpResponseRedirect('/%s/%s/' % (cf.id, patch.id)) + return HttpResponseRedirect("/%s/%s/" % (cf.id, patch.id)) else: try: form = CommentForm(patch, poc, is_review) except Exception as e: - messages.add_message(request, messages.ERROR, "Failed to build list of response options from the archives: %s" % e) - return HttpResponseRedirect('/%s/%s/' % (cf.id, patch.id)) - - return render(request, 'base_form.html', { - 'cf': cf, - 'form': form, - 'patch': patch, - 'extraformclass': 'patchcommentform', - 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, - {'title': 'View patch', 'href': '/%s/%s/' % (cf.pk, patch.pk)}], - 'title': "Add %s" % what, - 'note': 'Note! This form will generate an email to the public mailinglist %s, with sender set to %s!
Please ensure that the email settings for your domain (DKIM, SPF) allow emails from external sources.' % (settings.HACKERS_EMAIL, UserWrapper(request.user).email), - 'savebutton': 'Send %s' % what, - }) + messages.add_message( + request, + messages.ERROR, + "Failed to build list of response options from the archives: %s" % e, + ) + return HttpResponseRedirect("/%s/%s/" % (cf.id, patch.id)) + + return render( + request, + "base_form.html", + { + "cf": cf, + "form": form, + "patch": patch, + "extraformclass": "patchcommentform", + "breadcrumbs": [ + {"title": cf.title, "href": "/%s/" % cf.pk}, + {"title": "View patch", "href": "/%s/%s/" % (cf.pk, patch.pk)}, + ], + "title": "Add %s" % what, + "note": 'Note! This form will generate an email to the public mailinglist %s, with sender set to %s!
Please ensure that the email settings for your domain (DKIM, SPF) allow emails from external sources.' + % (settings.HACKERS_EMAIL, UserWrapper(request.user).email), + "savebutton": "Send %s" % what, + }, + ) @login_required @transaction.atomic -def status(request, cfid, patchid, status): - poc = get_object_or_404(PatchOnCommitFest.objects.select_related(), commitfest__id=cfid, patch__id=patchid) - - if poc.is_closed: - # We allow modification of patches in closed CFs *only* if it's the - # last CF that the patch is part of. If it's part of another CF, that - # is later than this one, tell the user to go there instead. - lastcf = PatchOnCommitFest.objects.filter(patch__id=patchid).order_by('-commitfest__startdate')[0] - if poc != lastcf: - messages.add_message(request, messages.INFO, "The status of this patch cannot be changed in this commitfest. You must modify it in the one where it's open!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) - - if status == 'review': +def status(request, patchid, status): + patch = get_object_or_404(Patch.objects.select_related(), pk=patchid) + cf = patch.current_commitfest() + poc = get_object_or_404( + PatchOnCommitFest.objects.select_related(), + commitfest__id=cf.id, + patch__id=patchid, + ) + + if status == "review": newstatus = PatchOnCommitFest.STATUS_REVIEW - elif status == 'author': + elif status == "author": newstatus = PatchOnCommitFest.STATUS_AUTHOR - elif status == 'committer': + elif status == "committer": newstatus = PatchOnCommitFest.STATUS_COMMITTER else: raise Exception("Can't happen") @@ -563,48 +786,73 @@ def status(request, cfid, patchid, status): poc.patch.save() poc.save() - PatchHistory(patch=poc.patch, by=request.user, what='New status: %s' % poc.statusstring).save_and_notify() + PatchHistory( + patch=poc.patch, by=request.user, what="New status: %s" % poc.statusstring + ).save_and_notify() - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) + return 
HttpResponseRedirect("/patch/%s/" % (poc.patch.id)) @login_required @transaction.atomic -def close(request, cfid, patchid, status): - poc = get_object_or_404(PatchOnCommitFest.objects.select_related(), commitfest__id=cfid, patch__id=patchid) - - if poc.is_closed: - # We allow modification of patches in closed CFs *only* if it's the - # last CF that the patch is part of. If it's part of another CF, that - # is later than this one, tell the user to go there instead. - lastcf = PatchOnCommitFest.objects.filter(patch__id=patchid).order_by('-commitfest__startdate')[0] - if poc != lastcf: - messages.add_message(request, messages.INFO, "The status of this patch cannot be changed in this commitfest. You must modify it in the one where it's open!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) +def close(request, patchid, status): + patch = get_object_or_404(Patch.objects.select_related(), pk=patchid) + cf = patch.current_commitfest() + + try: + request_cfid = int(request.GET.get("cfid", "")) + except ValueError: + # int() failed, ignore + request_cfid = None + + if request_cfid is not None and request_cfid != cf.id: + # The cfid parameter is only added to the /next/ link. That's the only + # close operation where two people pressing the button at the same time + # can have unintended effects. + messages.error( + request, + "The patch was moved to a new commitfest by someone else. Please double check if you still want to retry this operation.", + ) + return HttpResponseRedirect("/%s/%s/" % (cf.id, patch.id)) + + poc = get_object_or_404( + PatchOnCommitFest.objects.select_related(), + commitfest__id=cf.id, + patch__id=patchid, + ) poc.leavedate = datetime.now() # We know the status can't be one of the ones below, since we # have checked that we're not closed yet. Therefor, we don't # need to check if the individual status has changed. 
- if status == 'reject': + if status == "reject": poc.status = PatchOnCommitFest.STATUS_REJECTED - elif status == 'withdrawn': + elif status == "withdrawn": poc.status = PatchOnCommitFest.STATUS_WITHDRAWN - elif status == 'feedback': + elif status == "feedback": poc.status = PatchOnCommitFest.STATUS_RETURNED - elif status == 'next': + elif status == "next": # Only some patch statuses can actually be moved. - if poc.status in (PatchOnCommitFest.STATUS_COMMITTED, - PatchOnCommitFest.STATUS_NEXT, - PatchOnCommitFest.STATUS_RETURNED, - PatchOnCommitFest.STATUS_REJECTED): + if poc.status in ( + PatchOnCommitFest.STATUS_COMMITTED, + PatchOnCommitFest.STATUS_NEXT, + PatchOnCommitFest.STATUS_RETURNED, + PatchOnCommitFest.STATUS_REJECTED, + ): # Can't be moved! - messages.error(request, "A patch in status {0} cannot be moved to next commitfest.".format(poc.statusstring)) - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) - elif poc.status in (PatchOnCommitFest.STATUS_REVIEW, - PatchOnCommitFest.STATUS_AUTHOR, - PatchOnCommitFest.STATUS_COMMITTER): + messages.error( + request, + "A patch in status {0} cannot be moved to next commitfest.".format( + poc.statusstring + ), + ) + return HttpResponseRedirect("/%s/%s/" % (poc.commitfest.id, poc.patch.id)) + elif poc.status in ( + PatchOnCommitFest.STATUS_REVIEW, + PatchOnCommitFest.STATUS_AUTHOR, + PatchOnCommitFest.STATUS_COMMITTER, + ): # This one can be moved pass else: @@ -621,38 +869,60 @@ def close(request, cfid, patchid, status): newcf = CommitFest.objects.filter(status=CommitFest.STATUS_FUTURE) if len(newcf) == 0: messages.error(request, "No open and no future commitfest exists!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) + return HttpResponseRedirect( + "/%s/%s/" % (poc.commitfest.id, poc.patch.id) + ) elif len(newcf) != 1: - messages.error(request, "No open and multiple future commitfests exist!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, 
poc.patch.id)) + messages.error( + request, "No open and multiple future commitfests exist!" + ) + return HttpResponseRedirect( + "/%s/%s/" % (poc.commitfest.id, poc.patch.id) + ) elif len(newcf) != 1: messages.error(request, "Multiple open commitfests exists!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) + return HttpResponseRedirect("/%s/%s/" % (poc.commitfest.id, poc.patch.id)) elif newcf[0] == poc.commitfest: # The current open CF is the same one that we are already on. # In this case, try to see if there is a future CF we can # move it to. newcf = CommitFest.objects.filter(status=CommitFest.STATUS_FUTURE) if len(newcf) == 0: - messages.error(request, "Cannot move patch to the same commitfest, and no future commitfests exist!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) + messages.error( + request, + "Cannot move patch to the same commitfest, and no future commitfests exist!", + ) + return HttpResponseRedirect( + "/%s/%s/" % (poc.commitfest.id, poc.patch.id) + ) elif len(newcf) != 1: - messages.error(request, "Cannot move patch to the same commitfest, and multiple future commitfests exist!") - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) + messages.error( + request, + "Cannot move patch to the same commitfest, and multiple future commitfests exist!", + ) + return HttpResponseRedirect( + "/%s/%s/" % (poc.commitfest.id, poc.patch.id) + ) # Create a mapping to the new commitfest that we are bouncing # this patch to. 
- newpoc = PatchOnCommitFest(patch=poc.patch, - commitfest=newcf[0], - status=oldstatus, - enterdate=datetime.now()) + newpoc = PatchOnCommitFest( + patch=poc.patch, + commitfest=newcf[0], + status=oldstatus, + enterdate=datetime.now(), + ) newpoc.save() - elif status == 'committed': - committer = get_object_or_404(Committer, user__username=request.GET['c']) + elif status == "committed": + committer = get_object_or_404(Committer, user__username=request.GET["c"]) if committer != poc.patch.committer: # Committer changed! prevcommitter = poc.patch.committer poc.patch.committer = committer - PatchHistory(patch=poc.patch, by=request.user, what='Changed committer to %s' % committer).save_and_notify(prevcommitter=prevcommitter) + PatchHistory( + patch=poc.patch, + by=request.user, + what="Changed committer to %s" % committer, + ).save_and_notify(prevcommitter=prevcommitter) poc.status = PatchOnCommitFest.STATUS_COMMITTED else: raise Exception("Can't happen") @@ -661,63 +931,81 @@ def close(request, cfid, patchid, status): poc.patch.save() poc.save() - PatchHistory(patch=poc.patch, by=request.user, what='Closed in commitfest %s with status: %s' % (poc.commitfest, poc.statusstring)).save_and_notify() + PatchHistory( + patch=poc.patch, + by=request.user, + what="Closed in commitfest %s with status: %s" + % (poc.commitfest, poc.statusstring), + ).save_and_notify() - return HttpResponseRedirect('/%s/%s/' % (poc.commitfest.id, poc.patch.id)) + return HttpResponseRedirect("/%s/%s/" % (poc.commitfest.id, poc.patch.id)) @login_required @transaction.atomic -def reviewer(request, cfid, patchid, status): - get_object_or_404(CommitFest, pk=cfid) +def reviewer(request, patchid, status): patch = get_object_or_404(Patch, pk=patchid) is_reviewer = request.user in patch.reviewers.all() - if status == 'become' and not is_reviewer: + if status == "become" and not is_reviewer: patch.reviewers.add(request.user) patch.set_modified() - PatchHistory(patch=patch, by=request.user, what='Added %s as 
reviewer' % request.user.username).save_and_notify() - elif status == 'remove' and is_reviewer: + PatchHistory( + patch=patch, + by=request.user, + what="Added %s as reviewer" % request.user.username, + ).save_and_notify() + elif status == "remove" and is_reviewer: patch.reviewers.remove(request.user) patch.set_modified() - PatchHistory(patch=patch, by=request.user, what='Removed %s from reviewers' % request.user.username).save_and_notify() - return HttpResponseRedirect('../../') + PatchHistory( + patch=patch, + by=request.user, + what="Removed %s from reviewers" % request.user.username, + ).save_and_notify() + return HttpResponseRedirect("../../") @login_required @transaction.atomic def committer(request, cfid, patchid, status): - get_object_or_404(CommitFest, pk=cfid) patch = get_object_or_404(Patch, pk=patchid) committer = list(Committer.objects.filter(user=request.user, active=True)) if len(committer) == 0: - return HttpResponseForbidden('Only committers can do that!') + return HttpResponseForbidden("Only committers can do that!") committer = committer[0] is_committer = committer == patch.committer prevcommitter = patch.committer - if status == 'become' and not is_committer: + if status == "become" and not is_committer: patch.committer = committer patch.set_modified() - PatchHistory(patch=patch, by=request.user, what='Added %s as committer' % request.user.username).save_and_notify(prevcommitter=prevcommitter) - elif status == 'remove' and is_committer: + PatchHistory( + patch=patch, + by=request.user, + what="Added %s as committer" % request.user.username, + ).save_and_notify(prevcommitter=prevcommitter) + elif status == "remove" and is_committer: patch.committer = None patch.set_modified() - PatchHistory(patch=patch, by=request.user, what='Removed %s from committers' % request.user.username).save_and_notify(prevcommitter=prevcommitter) + PatchHistory( + patch=patch, + by=request.user, + what="Removed %s from committers" % request.user.username, + 
).save_and_notify(prevcommitter=prevcommitter) patch.save() - return HttpResponseRedirect('../../') + return HttpResponseRedirect("../../") @login_required @transaction.atomic -def subscribe(request, cfid, patchid, sub): - get_object_or_404(CommitFest, pk=cfid) +def subscribe(request, patchid, sub): patch = get_object_or_404(Patch, pk=patchid) - if sub == 'un': + if sub == "un": patch.subscribers.remove(request.user) messages.info(request, "You have been unsubscribed from updates on this patch") else: @@ -727,6 +1015,12 @@ def subscribe(request, cfid, patchid, sub): return HttpResponseRedirect("../") +def send_patch_email(request, patchid): + patch = get_object_or_404(Patch, pk=patchid) + cf = patch.current_commitfest() + return send_email(request, cf.id) + + @login_required @transaction.atomic def send_email(request, cfid): @@ -734,74 +1028,277 @@ def send_email(request, cfid): if not request.user.is_staff: raise Http404("Only CF managers can do that.") - if request.method == 'POST': - authoridstring = request.POST['authors'] - revieweridstring = request.POST['reviewers'] + if request.method == "POST": + authoridstring = request.POST["authors"] + revieweridstring = request.POST["reviewers"] form = BulkEmailForm(data=request.POST) if form.is_valid(): q = Q() if authoridstring: - q = q | Q(patch_author__in=[int(x) for x in authoridstring.split(',')]) + q = q | Q(patch_author__in=[int(x) for x in authoridstring.split(",")]) if revieweridstring: - q = q | Q(patch_reviewer__in=[int(x) for x in revieweridstring.split(',')]) + q = q | Q( + patch_reviewer__in=[int(x) for x in revieweridstring.split(",")] + ) recipients = User.objects.filter(q).distinct() for r in recipients: - send_simple_mail(UserWrapper(request.user).email, r.email, form.cleaned_data['subject'], form.cleaned_data['body'], request.user.username) - messages.add_message(request, messages.INFO, "Sent email to %s" % r.email) - return HttpResponseRedirect('..') + send_simple_mail( + 
UserWrapper(request.user).email, + r.email, + form.cleaned_data["subject"], + form.cleaned_data["body"], + request.user.username, + ) + messages.add_message( + request, messages.INFO, "Sent email to %s" % r.email + ) + return HttpResponseRedirect("..") else: - authoridstring = request.GET.get('authors', None) - revieweridstring = request.GET.get('reviewers', None) - form = BulkEmailForm(initial={'authors': authoridstring, 'reviewers': revieweridstring}) + authoridstring = request.GET.get("authors", None) + revieweridstring = request.GET.get("reviewers", None) + form = BulkEmailForm( + initial={"authors": authoridstring, "reviewers": revieweridstring} + ) if authoridstring: - authors = list(User.objects.filter(patch_author__in=[int(x) for x in authoridstring.split(',')]).distinct()) + authors = list( + User.objects.filter( + patch_author__in=[int(x) for x in authoridstring.split(",")] + ).distinct() + ) else: authors = [] if revieweridstring: - reviewers = list(User.objects.filter(patch_reviewer__in=[int(x) for x in revieweridstring.split(',')]).distinct()) + reviewers = list( + User.objects.filter( + patch_reviewer__in=[int(x) for x in revieweridstring.split(",")] + ).distinct() + ) else: reviewers = [] if len(authors) == 0 and len(reviewers) == 0: - messages.add_message(request, messages.WARNING, "No recipients specified, cannot send email") - return HttpResponseRedirect('..') + messages.add_message( + request, messages.WARNING, "No recipients specified, cannot send email" + ) + return HttpResponseRedirect("..") - messages.add_message(request, messages.INFO, "Email will be sent from: %s" % UserWrapper(request.user).email) + messages.add_message( + request, + messages.INFO, + "Email will be sent from: %s" % UserWrapper(request.user).email, + ) def _user_and_mail(u): return "%s %s (%s)" % (u.first_name, u.last_name, u.email) if len(authors): - messages.add_message(request, messages.INFO, "The email will be sent to the following authors: %s" % ", 
".join([_user_and_mail(u) for u in authors])) + messages.add_message( + request, + messages.INFO, + "The email will be sent to the following authors: %s" + % ", ".join([_user_and_mail(u) for u in authors]), + ) if len(reviewers): - messages.add_message(request, messages.INFO, "The email will be sent to the following reviewers: %s" % ", ".join([_user_and_mail(u) for u in reviewers])) + messages.add_message( + request, + messages.INFO, + "The email will be sent to the following reviewers: %s" + % ", ".join([_user_and_mail(u) for u in reviewers]), + ) + + return render( + request, + "base_form.html", + { + "cf": cf, + "form": form, + "title": "Send email", + "breadcrumbs": [ + {"title": cf.title, "href": "/%s/" % cf.pk}, + ], + "savebutton": "Send email", + }, + ) + + +@transaction.atomic +def cfbot_ingest(message): + """Ingest a single message status update message receive from cfbot. It + should be a Python dictionary, decoded from JSON already.""" - return render(request, 'base_form.html', { - 'cf': cf, - 'form': form, - 'title': 'Send email', - 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, ], - 'savebutton': 'Send email', - }) + cursor = connection.cursor() + + branch_status = message["branch_status"] + patch_id = branch_status["submission_id"] + branch_id = branch_status["branch_id"] + + try: + patch = Patch.objects.get(pk=patch_id) + except Patch.DoesNotExist: + # If the patch doesn't exist, there's nothing to do. This should never + # happen in production, but on the test system it's possible because + # not it doesn't contain the newest patches that the CFBot knows about. + return + + # Every message should have a branch_status, which we will INSERT + # or UPDATE. We do this first, because cfbot_task refers to it. + # Due to the way messages are sent/queued by cfbot it's possible that it + # sends the messages out-of-order. To handle this we we only update in two + # cases: + # 1. 
The created time of the branch is newer than the one in our database: + # This is a newer branch + # 2. If it's the same branch that we already have, but the modified time is + # newer: This is a status update for the current branch that we received + # in-order. + cursor.execute( + """INSERT INTO commitfest_cfbotbranch (patch_id, branch_id, + branch_name, commit_id, + apply_url, status, + created, modified, + version, patch_count, + first_additions, first_deletions, + all_additions, all_deletions + ) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + ON CONFLICT (patch_id) DO UPDATE + SET status = EXCLUDED.status, + modified = EXCLUDED.modified, + branch_id = EXCLUDED.branch_id, + branch_name = EXCLUDED.branch_name, + commit_id = EXCLUDED.commit_id, + apply_url = EXCLUDED.apply_url, + created = EXCLUDED.created, + version = EXCLUDED.version, + patch_count = EXCLUDED.patch_count, + first_additions = EXCLUDED.first_additions, + first_deletions = EXCLUDED.first_deletions, + all_additions = EXCLUDED.all_additions, + all_deletions = EXCLUDED.all_deletions + WHERE commitfest_cfbotbranch.created < EXCLUDED.created + OR (commitfest_cfbotbranch.branch_id = EXCLUDED.branch_id + AND commitfest_cfbotbranch.modified < EXCLUDED.modified) + """, + ( + patch_id, + branch_id, + branch_status["branch_name"], + branch_status["commit_id"], + branch_status["apply_url"], + branch_status["status"], + branch_status["created"], + branch_status["modified"], + branch_status["version"], + branch_status["patch_count"], + branch_status["first_additions"], + branch_status["first_deletions"], + branch_status["all_additions"], + branch_status["all_deletions"], + ), + ) + + # Now we check what we have in our database. If that contains a different + # branch_id than what we just tried to insert, then apparently this is a + # status update for an old branch and we don't care about any of the + # contents of this message. 
+ branch_in_db = CfbotBranch.objects.get(pk=patch_id) + if branch_in_db.branch_id != branch_id: + return + + # Most messages have a task_status. It might be missing in rare cases, like + # when cfbot decides that a whole branch has timed out. We INSERT or + # UPDATE. + if "task_status" in message: + task_status = message["task_status"] + cursor.execute( + """INSERT INTO commitfest_cfbottask (task_id, task_name, patch_id, branch_id, + position, status, + created, modified) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s) + ON CONFLICT (task_id) DO UPDATE + SET status = EXCLUDED.status, + modified = EXCLUDED.modified + WHERE commitfest_cfbottask.modified < EXCLUDED.modified""", + ( + task_status["task_id"], + task_status["task_name"], + patch_id, + branch_id, + task_status["position"], + task_status["status"], + task_status["created"], + task_status["modified"], + ), + ) + + # Remove any old tasks that are not related to this branch. These should + # only be left over when we just updated the branch_id. Knowing if we just + # updated the branch_id is not trivial though, because INSERT ON + # CONFLICT does not allow us to easily return the old value of the row. + # So instead we always delete all tasks that are not related to this + # branch. This is fine, because doing so is very cheap in the no-op case + # because we have an index on patch_id and there's only a handful of tasks + # per patch. + cursor.execute( + "DELETE FROM commitfest_cfbottask WHERE patch_id=%s AND branch_id != %s", + (patch_id, branch_id), + ) + + # We change the needs_rebase field using a separate UPDATE because this way + # we can find out what the previous state of the field was (sadly INSERT ON + # CONFLICT does not allow us to return that). We need to know the previous + # state so we can skip sending notifications if the needs_rebase status did + # not change. 
+ needs_rebase = branch_status["commit_id"] is None + if bool(branch_in_db.needs_rebase_since) is not needs_rebase: + if needs_rebase: + branch_in_db.needs_rebase_since = datetime.now() + else: + branch_in_db.needs_rebase_since = None + branch_in_db.save() + + if needs_rebase: + PatchHistory( + patch=patch, by=None, by_cfbot=True, what="Patch needs rebase" + ).save_and_notify(authors_only=True) + else: + PatchHistory( + patch=patch, + by=None, + by_cfbot=True, + what="Patch does not need rebase anymore", + ).save_and_notify(authors_only=True) + + +@csrf_exempt +def cfbot_notify(request): + if request.method != "POST": + return HttpResponseForbidden("Invalid method") + + j = json.loads(request.body) + if not hmac.compare_digest(j["shared_secret"], settings.CFBOT_SECRET): + return HttpResponseForbidden("Invalid API key") + + cfbot_ingest(j) + return HttpResponse(status=200) @csrf_exempt def thread_notify(request): - if request.method != 'POST': + if request.method != "POST": return HttpResponseForbidden("Invalid method") j = json.loads(request.body) - if j['apikey'] != settings.ARCHIVES_APIKEY: + if j["apikey"] != settings.ARCHIVES_APIKEY: return HttpResponseForbidden("Invalid API key") - for m in j['messageids']: + for m in j["messageids"]: try: t = MailThread.objects.get(messageid=m) refresh_single_thread(t) - except Exception as e: + except Exception: # Just ignore it, we'll check again later pass diff --git a/pgcommitfest/commitfest/widgets.py b/pgcommitfest/commitfest/widgets.py index 4af8b2d8..e6891621 100644 --- a/pgcommitfest/commitfest/widgets.py +++ b/pgcommitfest/commitfest/widgets.py @@ -4,7 +4,11 @@ class ThreadPickWidget(TextInput): def render(self, name, value, attrs=None, renderer=None): - attrs['class'] += ' threadpick-input' + attrs["class"] += " threadpick-input" html = super(ThreadPickWidget, self).render(name, value, attrs) - html = html + ' ' % name + html = ( + html + + ' ' + % name + ) return mark_safe(html) diff --git 
a/pgcommitfest/local_settings_example.py b/pgcommitfest/local_settings_example.py index d3648ccb..c49b000f 100644 --- a/pgcommitfest/local_settings_example.py +++ b/pgcommitfest/local_settings_example.py @@ -1,3 +1,5 @@ +import os + # Enable more debugging information DEBUG = True # Prevent logging to try to send emails to postgresql.org admins. @@ -5,20 +7,26 @@ LOGGING = None DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'pgcommitfest', - 'USER': 'postgres', - 'PASSWORD': 'postgres', - 'HOST': '0.0.0.0', + "default": { + "ENGINE": "django.db.backends.postgresql_psycopg2", + "NAME": "pgcommitfest", + "USER": "postgres", + "PASSWORD": "postgres", + "HOST": "0.0.0.0", } } # Disables the PostgreSQL.ORG authentication. # Use the default built-in Django authentication module. -AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend'] +AUTHENTICATION_BACKENDS = ["django.contrib.auth.backends.ModelBackend"] # The only login page we have in development is the Django admin login page. # It's not great, because it won't redirect to the page you were trying to # access, but it's better than a HTTP 500 error. 
-PGAUTH_REDIRECT = '/admin/login/' +PGAUTH_REDIRECT = "/admin/login/" + +MOCK_ARCHIVES = True +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +MOCK_ARCHIVE_DATA = os.path.join( + BASE_DIR, "commitfest", "fixtures", "archive_data.json" +) diff --git a/pgcommitfest/mailqueue/migrations/0001_initial.py b/pgcommitfest/mailqueue/migrations/0001_initial.py index cd392829..38d1214d 100644 --- a/pgcommitfest/mailqueue/migrations/0001_initial.py +++ b/pgcommitfest/mailqueue/migrations/0001_initial.py @@ -5,18 +5,24 @@ class Migration(migrations.Migration): - - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='QueuedMail', + name="QueuedMail", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('sender', models.EmailField(max_length=100)), - ('receiver', models.EmailField(max_length=100)), - ('fullmsg', models.TextField()), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("sender", models.EmailField(max_length=100)), + ("receiver", models.EmailField(max_length=100)), + ("fullmsg", models.TextField()), ], ), ] diff --git a/pgcommitfest/mailqueue/util.py b/pgcommitfest/mailqueue/util.py index 435f083e..441802b3 100644 --- a/pgcommitfest/mailqueue/util.py +++ b/pgcommitfest/mailqueue/util.py @@ -1,34 +1,38 @@ from django.template.loader import get_template -from email.mime.text import MIMEText +from email import encoders from email.mime.multipart import MIMEMultipart from email.mime.nonmultipart import MIMENonMultipart +from email.mime.text import MIMEText from email.utils import formatdate -from email import encoders from .models import QueuedMail -def send_simple_mail(sender, receiver, subject, msgtxt, sending_username, attachments=None): +def send_simple_mail( + sender, receiver, subject, msgtxt, sending_username, attachments=None +): # attachment format, each is a tuple of (name, 
mimetype,contents) # content should already be base64 encoded msg = MIMEMultipart() - msg['Subject'] = subject - msg['To'] = receiver - msg['From'] = sender - msg['Date'] = formatdate(localtime=True) - msg['User-Agent'] = 'pgcommitfest' + msg["Subject"] = subject + msg["To"] = receiver + msg["From"] = sender + msg["Date"] = formatdate(localtime=True) + msg["User-Agent"] = "pgcommitfest" if sending_username: - msg['X-cfsender'] = sending_username + msg["X-cfsender"] = sending_username - msg.attach(MIMEText(msgtxt, _charset='utf-8')) + msg.attach(MIMEText(msgtxt, _charset="utf-8")) if attachments: for filename, contenttype, content in attachments: - main, sub = contenttype.split('/') + main, sub = contenttype.split("/") part = MIMENonMultipart(main, sub) part.set_payload(content) - part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename) + part.add_header( + "Content-Disposition", 'attachment; filename="%s"' % filename + ) encoders.encode_base64(part) msg.attach(part) @@ -41,7 +45,19 @@ def send_mail(sender, receiver, fullmsg): QueuedMail(sender=sender, receiver=receiver, fullmsg=fullmsg).save() -def send_template_mail(sender, senderaccountname, receiver, subject, templatename, templateattr={}, usergenerated=False): - send_simple_mail(sender, receiver, subject, - get_template(templatename).render(templateattr), - senderaccountname) +def send_template_mail( + sender, + senderaccountname, + receiver, + subject, + templatename, + templateattr={}, + usergenerated=False, +): + send_simple_mail( + sender, + receiver, + subject, + get_template(templatename).render(templateattr), + senderaccountname, + ) diff --git a/pgcommitfest/settings.py b/pgcommitfest/settings.py index ffac58cc..9b867b71 100644 --- a/pgcommitfest/settings.py +++ b/pgcommitfest/settings.py @@ -1,28 +1,25 @@ # Django settings for pgcommitfest project. 
-from django.conf import global_settings DEBUG = False TEMPLATE_DEBUG = DEBUG -ALLOWED_HOSTS = ['*'] +ALLOWED_HOSTS = ["*"] -ADMINS = ( - ('webmaster@postgresql.org', 'webmaster@postgresql.org'), -) +ADMINS = (("webmaster@postgresql.org", "webmaster@postgresql.org"),) MANAGERS = ADMINS DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. - 'NAME': 'pgcommitfest', # Or path to database file if using sqlite3. - 'USER': '', # Not used with sqlite3. - 'PASSWORD': '', # Not used with sqlite3. - 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. - 'PORT': '', # Set to empty string for default. Not used with sqlite3. + "default": { + "ENGINE": "django.db.backends.postgresql_psycopg2", # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. + "NAME": "pgcommitfest", # Or path to database file if using sqlite3. + "USER": "", # Not used with sqlite3. + "PASSWORD": "", # Not used with sqlite3. + "HOST": "", # Set to empty string for localhost. Not used with sqlite3. + "PORT": "", # Set to empty string for default. Not used with sqlite3. } } -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name @@ -31,11 +28,11 @@ # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. -TIME_ZONE = 'GMT' +TIME_ZONE = "GMT" # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" SITE_ID = 1 @@ -49,27 +46,27 @@ # Absolute filesystem path to the directory that will hold user-uploaded files. 
# Example: "/home/media/media.lawrence.com/media/" -MEDIA_ROOT = '' +MEDIA_ROOT = "" # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" -MEDIA_URL = '' +MEDIA_URL = "" # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" -STATIC_ROOT = '' +STATIC_ROOT = "" # URL prefix for static files. # Example: "http://media.lawrence.com/static/" -STATIC_URL = '/media/' +STATIC_URL = "/media/" # URL prefix for admin static files -- CSS, JavaScript and images. # Make sure to use a trailing slash. # Examples: "http://foo.com/static/admin/", "/static/admin/". -ADMIN_MEDIA_PREFIX = '/media/admin/' +ADMIN_MEDIA_PREFIX = "/media/admin/" # Additional locations of static files STATICFILES_DIRS = ( @@ -80,55 +77,54 @@ # List of finder classes that know how to find static files in # various locations. -STATICFILES_FINDERS = ( -) +STATICFILES_FINDERS = () # Make this unique, and don't share it with anybody. 
-SECRET_KEY = 'REALLYCHANGETHISINLOCAL_SETTINGS.PY' +SECRET_KEY = "REALLYCHANGETHISINLOCAL_SETTINGS.PY" MIDDLEWARE = ( - 'django.middleware.common.CommonMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', + "django.middleware.common.CommonMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", ) -ROOT_URLCONF = 'pgcommitfest.urls' - -TEMPLATES = [{ - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': ['global_templates'], - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - 'loaders': [ - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - ], - }, -}] +ROOT_URLCONF = "pgcommitfest.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": ["global_templates"], + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + "loaders": [ + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", + ], + }, + } +] INSTALLED_APPS = ( - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", # Uncomment the next line to enable the admin: - 'django.contrib.admin', + "django.contrib.admin", # Uncomment the next 
line to enable admin documentation: # 'django.contrib.admindocs', - 'pgcommitfest.commitfest.apps.CFAppConfig', - 'pgcommitfest.mailqueue', - 'pgcommitfest.userprofile', + "pgcommitfest.commitfest.apps.CFAppConfig", + "pgcommitfest.mailqueue", + "pgcommitfest.userprofile", ) -AUTHENTICATION_BACKENDS = ( - 'pgcommitfest.auth.AuthBackend', -) +AUTHENTICATION_BACKENDS = ("pgcommitfest.auth.AuthBackend",) # A sample logging configuration. The only tangible logging @@ -137,32 +133,29 @@ # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'handlers': { - 'mail_admins': { - 'level': 'ERROR', - 'class': 'django.utils.log.AdminEmailHandler' - } + "version": 1, + "disable_existing_loggers": False, + "handlers": { + "mail_admins": {"level": "ERROR", "class": "django.utils.log.AdminEmailHandler"} }, - 'loggers': { - 'django.request': { - 'handlers': ['mail_admins'], - 'level': 'ERROR', - 'propagate': True, + "loggers": { + "django.request": { + "handlers": ["mail_admins"], + "level": "ERROR", + "propagate": True, }, - } + }, } -DATE_FORMAT = 'Y-m-d' -DATETIME_FORMAT = 'Y-m-d H:i:s' +DATE_FORMAT = "Y-m-d" +DATETIME_FORMAT = "Y-m-d H:i:s" -LOGIN_URL = '/account/login/' +LOGIN_URL = "/account/login/" -ARCHIVES_TIMEOUT = 10 # Seconds to wait for calls to the archives +ARCHIVES_TIMEOUT = 10 # Seconds to wait for calls to the archives ARCHIVES_SERVER = "localhost" ARCHIVES_PORT = "8001" -ARCHIVES_HOST = "archives.postgresql.org" # Host: header to send +ARCHIVES_HOST = "archives.postgresql.org" # Host: header to send ARCHIVES_APIKEY = None # Email address to pgsql-hackers. Set to something local to test maybe? 
@@ -173,6 +166,6 @@ # Load local settings overrides try: - from .local_settings import * + from .local_settings import * # noqa: F403 except ImportError: pass diff --git a/pgcommitfest/urls.py b/pgcommitfest/urls.py index 53fce1c1..e94f9e94 100644 --- a/pgcommitfest/urls.py +++ b/pgcommitfest/urls.py @@ -1,11 +1,11 @@ -from django.urls import re_path from django.contrib import admin +from django.urls import re_path -import pgcommitfest.commitfest.views as views -import pgcommitfest.commitfest.reports as reports +import pgcommitfest.auth import pgcommitfest.commitfest.ajax as ajax import pgcommitfest.commitfest.lookups as lookups -import pgcommitfest.auth +import pgcommitfest.commitfest.reports as reports +import pgcommitfest.commitfest.views as views import pgcommitfest.userprofile.views # Uncomment the next two lines to enable the admin: @@ -14,45 +14,49 @@ urlpatterns = [ - re_path(r'^$', views.home), - re_path(r'^activity(?P\.rss)?/', views.activity), - re_path(r'^(\d+)/$', views.commitfest), - re_path(r'^(open|inprogress|current)/(.*)$', views.redir), - re_path(r'^(?P\d+)/activity(?P\.rss)?/$', views.activity), - re_path(r'^patch/(\d+)/$', views.patch_redirect), - re_path(r'^(\d+)/(\d+)/$', views.patch), - re_path(r'^(\d+)/(\d+)/edit/$', views.patchform), - re_path(r'^(\d+)/new/$', views.newpatch), - re_path(r'^(\d+)/(\d+)/status/(review|author|committer)/$', views.status), - re_path(r'^(\d+)/(\d+)/close/(reject|withdrawn|feedback|committed|next)/$', views.close), - re_path(r'^(\d+)/(\d+)/reviewer/(become|remove)/$', views.reviewer), - re_path(r'^(\d+)/(\d+)/committer/(become|remove)/$', views.committer), - re_path(r'^(\d+)/(\d+)/(un)?subscribe/$', views.subscribe), - re_path(r'^(\d+)/(\d+)/(comment|review)/', views.comment), - re_path(r'^(\d+)/send_email/$', views.send_email), - re_path(r'^(\d+)/\d+/send_email/$', views.send_email), - re_path(r'^(\d+)/reports/authorstats/$', reports.authorstats), - re_path(r'^search/$', views.global_search), - 
re_path(r'^ajax/(\w+)/$', ajax.main), - re_path(r'^lookups/user/$', lookups.userlookup), - re_path(r'^thread_notify/$', views.thread_notify), - + re_path(r"^$", views.home), + re_path(r"^activity(?P\.rss)?/", views.activity), + re_path(r"^(\d+)/$", views.commitfest), + re_path(r"^(open|inprogress|current)/(.*)$", views.redir), + re_path(r"^(?P\d+)/activity(?P\.rss)?/$", views.activity), + re_path(r"^(\d+)/(\d+)/$", views.patch_legacy_redirect), + re_path(r"^patch/(\d+)/$", views.patch), + re_path(r"^patch/(\d+)/edit/$", views.patchform), + re_path(r"^(\d+)/new/$", views.newpatch), + re_path(r"^patch/(\d+)/status/(review|author|committer)/$", views.status), + re_path( + r"^patch/(\d+)/close/(reject|withdrawn|feedback|committed|next)/$", views.close + ), + re_path(r"^patch/(\d+)/reviewer/(become|remove)/$", views.reviewer), + re_path(r"^patch/(\d+)/committer/(become|remove)/$", views.committer), + re_path(r"^patch/(\d+)/(un)?subscribe/$", views.subscribe), + re_path(r"^patch/(\d+)/(comment|review)/", views.comment), + re_path(r"^(\d+)/send_email/$", views.send_email), + re_path(r"^patch/(\d+)/send_email/$", views.send_patch_email), + re_path(r"^(\d+)/reports/authorstats/$", reports.authorstats), + re_path(r"^search/$", views.global_search), + re_path(r"^ajax/(\w+)/$", ajax.main), + re_path(r"^lookups/user/$", lookups.userlookup), + re_path(r"^thread_notify/$", views.thread_notify), + re_path(r"^cfbot_notify/$", views.cfbot_notify), + # Legacy email POST route. This can be safely removed in a few days from + # the first time this is deployed. Its only purpose is not breaking + # submissions from a previous page load, during the deploy of the new + # /patch/(\d+) routes. It would be a shame if someone lost their well + # written email because of this.
+ re_path(r"^\d+/(\d+)/send_email/$", views.send_patch_email), # Auth system integration - re_path(r'^(?:account/)?login/?$', pgcommitfest.auth.login), - re_path(r'^(?:account/)?logout/?$', pgcommitfest.auth.logout), - re_path(r'^auth_receive/$', pgcommitfest.auth.auth_receive), - re_path(r'^auth_api/$', pgcommitfest.auth.auth_api), - + re_path(r"^(?:account/)?login/?$", pgcommitfest.auth.login), + re_path(r"^(?:account/)?logout/?$", pgcommitfest.auth.logout), + re_path(r"^auth_receive/$", pgcommitfest.auth.auth_receive), + re_path(r"^auth_api/$", pgcommitfest.auth.auth_api), # Account management - re_path(r'^account/profile/$', pgcommitfest.userprofile.views.userprofile), - + re_path(r"^account/profile/$", pgcommitfest.userprofile.views.userprofile), # Examples: # re_path(r'^$', 'pgpgcommitfest.commitfest.views.home', name='home), # re_path(r'^pgcommitfest/', include('pgcommitfest.foo.urls)), - # Uncomment the admin/doc line below to enable admin documentation: # re_path(r'^admin/doc/', include('django.contrib.admindocs.urls)), - # Uncomment the next line to enable the admin: - re_path(r'^admin/', admin.site.urls), + re_path(r"^admin/", admin.site.urls), ] diff --git a/pgcommitfest/userprofile/admin.py b/pgcommitfest/userprofile/admin.py index 5bf19e76..bc847464 100644 --- a/pgcommitfest/userprofile/admin.py +++ b/pgcommitfest/userprofile/admin.py @@ -4,7 +4,7 @@ class UserProfileAdmin(admin.ModelAdmin): - list_display = ('user', ) + list_display = ("user",) admin.site.register(UserProfile, UserProfileAdmin) diff --git a/pgcommitfest/userprofile/forms.py b/pgcommitfest/userprofile/forms.py index 35d74bd5..e0ed5a1b 100644 --- a/pgcommitfest/userprofile/forms.py +++ b/pgcommitfest/userprofile/forms.py @@ -1,22 +1,26 @@ from django import forms -from .models import UserProfile, UserExtraEmail +from .models import UserExtraEmail, UserProfile class UserProfileForm(forms.ModelForm): class Meta: model = UserProfile - exclude = ('user', ) + exclude = ("user",) def 
__init__(self, user, *args, **kwargs): super(UserProfileForm, self).__init__(*args, **kwargs) self.user = user - mailhelp = "To add a new address to choose from, update your user profile on postgresql.org." + mailhelp = 'To add a new address to choose from, update your user profile on postgresql.org.' - self.fields['selectedemail'].empty_label = self.user.email - self.fields['selectedemail'].queryset = UserExtraEmail.objects.filter(user=self.user) - self.fields['selectedemail'].help_text = mailhelp - self.fields['notifyemail'].empty_label = self.user.email - self.fields['notifyemail'].queryset = UserExtraEmail.objects.filter(user=self.user) - self.fields['notifyemail'].help_text = mailhelp + self.fields["selectedemail"].empty_label = self.user.email + self.fields["selectedemail"].queryset = UserExtraEmail.objects.filter( + user=self.user + ) + self.fields["selectedemail"].help_text = mailhelp + self.fields["notifyemail"].empty_label = self.user.email + self.fields["notifyemail"].queryset = UserExtraEmail.objects.filter( + user=self.user + ) + self.fields["notifyemail"].help_text = mailhelp diff --git a/pgcommitfest/userprofile/migrations/0001_initial.py b/pgcommitfest/userprofile/migrations/0001_initial.py index 399d26f4..45ad7696 100644 --- a/pgcommitfest/userprofile/migrations/0001_initial.py +++ b/pgcommitfest/userprofile/migrations/0001_initial.py @@ -1,41 +1,75 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations, models from django.conf import settings +from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='UserExtraEmail', + name="UserExtraEmail", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('email', models.EmailField(unique=True, max_length=100)), - ('confirmed', 
models.BooleanField(default=False)), - ('token', models.CharField(max_length=100, blank=True)), - ('tokensent', models.DateTimeField()), - ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ("email", models.EmailField(unique=True, max_length=100)), + ("confirmed", models.BooleanField(default=False)), + ("token", models.CharField(max_length=100, blank=True)), + ("tokensent", models.DateTimeField()), + ( + "user", + models.ForeignKey( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), + ), ], options={ - 'ordering': ('user', 'email'), + "ordering": ("user", "email"), }, ), migrations.CreateModel( - name='UserProfile', + name="UserProfile", fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('selectedemail', models.ForeignKey(verbose_name='Sender email', blank=True, to='userprofile.UserExtraEmail', null=True, on_delete=models.CASCADE)), - ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ( + "selectedemail", + models.ForeignKey( + verbose_name="Sender email", + blank=True, + to="userprofile.UserExtraEmail", + null=True, + on_delete=models.CASCADE, + ), + ), + ( + "user", + models.ForeignKey( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), + ), ], ), migrations.AlterUniqueTogether( - name='userextraemail', - unique_together=set([('user', 'email')]), + name="userextraemail", + unique_together=set([("user", "email")]), ), ] diff --git a/pgcommitfest/userprofile/migrations/0002_notifications.py b/pgcommitfest/userprofile/migrations/0002_notifications.py index 79260457..69eaf03a 100644 --- a/pgcommitfest/userprofile/migrations/0002_notifications.py +++ 
b/pgcommitfest/userprofile/migrations/0002_notifications.py @@ -1,40 +1,54 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations, models from django.conf import settings +from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('userprofile', '0001_initial'), + ("userprofile", "0001_initial"), ] operations = [ migrations.AddField( - model_name='userprofile', - name='notify_all_author', - field=models.BooleanField(default=False, verbose_name='Notify on all where author'), + model_name="userprofile", + name="notify_all_author", + field=models.BooleanField( + default=False, verbose_name="Notify on all where author" + ), ), migrations.AddField( - model_name='userprofile', - name='notify_all_committer', - field=models.BooleanField(default=False, verbose_name='Notify on all where committer'), + model_name="userprofile", + name="notify_all_committer", + field=models.BooleanField( + default=False, verbose_name="Notify on all where committer" + ), ), migrations.AddField( - model_name='userprofile', - name='notify_all_reviewer', - field=models.BooleanField(default=False, verbose_name='Notify on all where reviewer'), + model_name="userprofile", + name="notify_all_reviewer", + field=models.BooleanField( + default=False, verbose_name="Notify on all where reviewer" + ), ), migrations.AddField( - model_name='userprofile', - name='notifyemail', - field=models.ForeignKey(related_name='notifier', verbose_name='Notifications sent to', blank=True, to='userprofile.UserExtraEmail', null=True, on_delete=models.CASCADE), + model_name="userprofile", + name="notifyemail", + field=models.ForeignKey( + related_name="notifier", + verbose_name="Notifications sent to", + blank=True, + to="userprofile.UserExtraEmail", + null=True, + on_delete=models.CASCADE, + ), ), migrations.AlterField( - model_name='userprofile', - name='user', - field=models.OneToOneField(to=settings.AUTH_USER_MODEL, 
on_delete=models.CASCADE), + model_name="userprofile", + name="user", + field=models.OneToOneField( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), ), ] diff --git a/pgcommitfest/userprofile/migrations/0003_emails_managed_upstream.py b/pgcommitfest/userprofile/migrations/0003_emails_managed_upstream.py index 165b6e4c..622e6796 100644 --- a/pgcommitfest/userprofile/migrations/0003_emails_managed_upstream.py +++ b/pgcommitfest/userprofile/migrations/0003_emails_managed_upstream.py @@ -1,36 +1,48 @@ # Generated by Django 2.2.11 on 2020-08-11 11:09 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('userprofile', '0002_notifications'), + ("userprofile", "0002_notifications"), ] operations = [ migrations.RemoveField( - model_name='userextraemail', - name='confirmed', + model_name="userextraemail", + name="confirmed", ), migrations.RemoveField( - model_name='userextraemail', - name='token', + model_name="userextraemail", + name="token", ), migrations.RemoveField( - model_name='userextraemail', - name='tokensent', + model_name="userextraemail", + name="tokensent", ), migrations.AlterField( - model_name='userprofile', - name='notifyemail', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='notifier', to='userprofile.UserExtraEmail', verbose_name='Notifications sent to'), + model_name="userprofile", + name="notifyemail", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="notifier", + to="userprofile.UserExtraEmail", + verbose_name="Notifications sent to", + ), ), migrations.AlterField( - model_name='userprofile', - name='selectedemail', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='userprofile.UserExtraEmail', verbose_name='Sender email'), + 
model_name="userprofile", + name="selectedemail", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="userprofile.UserExtraEmail", + verbose_name="Sender email", + ), ), ] diff --git a/pgcommitfest/userprofile/models.py b/pgcommitfest/userprofile/models.py index 79da6888..f6294109 100644 --- a/pgcommitfest/userprofile/models.py +++ b/pgcommitfest/userprofile/models.py @@ -1,29 +1,56 @@ -from django.db import models from django.contrib.auth.models import User +from django.db import models class UserExtraEmail(models.Model): - user = models.ForeignKey(User, null=False, blank=False, db_index=True, on_delete=models.CASCADE) + user = models.ForeignKey( + User, null=False, blank=False, db_index=True, on_delete=models.CASCADE + ) email = models.EmailField(max_length=100, null=False, blank=False, unique=True) def __str__(self): return self.email class Meta: - ordering = ('user', 'email') - unique_together = (('user', 'email'),) + ordering = ("user", "email") + unique_together = (("user", "email"),) class UserProfile(models.Model): user = models.OneToOneField(User, null=False, blank=False, on_delete=models.CASCADE) - selectedemail = models.ForeignKey(UserExtraEmail, null=True, blank=True, - verbose_name='Sender email', on_delete=models.SET_NULL) - notifyemail = models.ForeignKey(UserExtraEmail, null=True, blank=True, - verbose_name='Notifications sent to', - related_name='notifier', on_delete=models.SET_NULL) - notify_all_author = models.BooleanField(null=False, blank=False, default=False, verbose_name="Notify on all where author") - notify_all_reviewer = models.BooleanField(null=False, blank=False, default=False, verbose_name="Notify on all where reviewer") - notify_all_committer = models.BooleanField(null=False, blank=False, default=False, verbose_name="Notify on all where committer") + selectedemail = models.ForeignKey( + UserExtraEmail, + null=True, + blank=True, + verbose_name="Sender email", + 
on_delete=models.SET_NULL, + ) + notifyemail = models.ForeignKey( + UserExtraEmail, + null=True, + blank=True, + verbose_name="Notifications sent to", + related_name="notifier", + on_delete=models.SET_NULL, + ) + notify_all_author = models.BooleanField( + null=False, + blank=False, + default=False, + verbose_name="Notify on all where author", + ) + notify_all_reviewer = models.BooleanField( + null=False, + blank=False, + default=False, + verbose_name="Notify on all where reviewer", + ) + notify_all_committer = models.BooleanField( + null=False, + blank=False, + default=False, + verbose_name="Notify on all where committer", + ) def __str__(self): return str(self.user) diff --git a/pgcommitfest/userprofile/util.py b/pgcommitfest/userprofile/util.py index 92359b7a..78a17597 100644 --- a/pgcommitfest/userprofile/util.py +++ b/pgcommitfest/userprofile/util.py @@ -1,7 +1,7 @@ -from email.utils import formataddr from email.header import Header +from email.utils import formataddr -from .models import UserProfile, UserExtraEmail +from .models import UserExtraEmail, UserProfile class UserWrapper(object): @@ -21,16 +21,23 @@ def email(self): @property def encoded_email_header(self): - return formataddr(( - str(Header("%s %s" % (self.user.first_name, self.user.last_name), 'utf-8')), - self.email)) + return formataddr( + ( + str( + Header( + "%s %s" % (self.user.first_name, self.user.last_name), "utf-8" + ) + ), + self.email, + ) + ) def handle_user_data(sender, **kwargs): - user = kwargs.pop('user') - userdata = kwargs.pop('userdata') + user = kwargs.pop("user") + userdata = kwargs.pop("userdata") - secondary = userdata.get('secondaryemails', []) + secondary = userdata.get("secondaryemails", []) # Remove any email attached to this user that are not upstream. Since the foreign keys # are set to SET_NULL, they will all revert to being the users default in this case. 
diff --git a/pgcommitfest/userprofile/views.py b/pgcommitfest/userprofile/views.py index 4c22bac0..f33216bd 100644 --- a/pgcommitfest/userprofile/views.py +++ b/pgcommitfest/userprofile/views.py @@ -1,11 +1,11 @@ -from django.shortcuts import render -from django.http import HttpResponseRedirect -from django.db import transaction from django.contrib import messages from django.contrib.auth.decorators import login_required +from django.db import transaction +from django.http import HttpResponseRedirect +from django.shortcuts import render -from .models import UserProfile from .forms import UserProfileForm +from .models import UserProfile @login_required @@ -13,15 +13,19 @@ def userprofile(request): (profile, created) = UserProfile.objects.get_or_create(user=request.user) - if request.method == 'POST': + if request.method == "POST": form = UserProfileForm(request.user, request.POST, instance=profile) if form.is_valid(): form.save() messages.add_message(request, messages.INFO, "User profile saved.") - return HttpResponseRedirect('.') + return HttpResponseRedirect(".") else: form = UserProfileForm(request.user, instance=profile) - return render(request, 'userprofileform.html', { - 'form': form, - }) + return render( + request, + "userprofileform.html", + { + "form": form, + }, + ) diff --git a/pgcommitfest/wsgi.py b/pgcommitfest/wsgi.py index 821a477e..cbd1c2fb 100644 --- a/pgcommitfest/wsgi.py +++ b/pgcommitfest/wsgi.py @@ -7,10 +7,10 @@ https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ -import os - from django.core.wsgi import get_wsgi_application +import os + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgcommitfest.settings") application = get_wsgi_application() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..00d5986f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,22 @@ +[tool.ruff] +# line-length = 120 + +[tool.ruff.format] +# quote-style = "preserve" + +[tool.ruff.lint] +extend-select = ["I"] + 
+[tool.ruff.lint.isort] +section-order = [ + "future", + "django", + "standard-library", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.isort.sections] +# Group all Django imports into a separate section. +"django" = ["django"] diff --git a/run_dev.py b/run_dev.py index 6c8189db..543c4ebf 100755 --- a/run_dev.py +++ b/run_dev.py @@ -11,9 +11,10 @@ we have this tiny script that will find the path to the Django admin static files and run uWSGI with the correct path. """ -from importlib.machinery import PathFinder + import subprocess import sys +from importlib.machinery import PathFinder django_path = PathFinder().find_spec("django").submodule_search_locations[0] diff --git a/setup.cfg b/setup.cfg index 1bf7d725..b5a80bc6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,4 +1,4 @@ [pycodestyle] statistics=True -ignore=E402,E501 +ignore=E402,E501,W503 max-line-length=120 diff --git a/tools/commitfest/check_patches_in_archives.py b/tools/commitfest/check_patches_in_archives.py index 148de2bf..e68150ce 100755 --- a/tools/commitfest/check_patches_in_archives.py +++ b/tools/commitfest/check_patches_in_archives.py @@ -7,30 +7,34 @@ # so we don't block the archives unnecessarily. 
# +import logging import os import sys -import requests + import magic -import logging +import requests # Set up for accessing django -sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../../')) +sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "../../")) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgcommitfest.settings") -import django +import django # noqa: E402 + django.setup() -from django.db import connection -from django.conf import settings +from django.conf import settings # noqa: E402 +from django.db import connection # noqa: E402 -from pgcommitfest.commitfest.models import MailThreadAttachment +from pgcommitfest.commitfest.models import MailThreadAttachment # noqa: E402 if __name__ == "__main__": debug = "--debug" in sys.argv # Logging always done to stdout, but we can turn on/off how much - logging.basicConfig(format='%(asctime)s %(levelname)s: %(msg)s', - level=debug and logging.DEBUG or logging.INFO, - stream=sys.stdout) + logging.basicConfig( + format="%(asctime)s %(levelname)s: %(msg)s", + level=debug and logging.DEBUG or logging.INFO, + stream=sys.stdout, + ) mag = magic.open(magic.MIME) mag.load() @@ -47,12 +51,14 @@ logging.debug("Checking attachment %s" % a.attachmentid) resp = requests.get( - "http{0}://{1}:{2}{3}".format(settings.ARCHIVES_PORT == 443 and 's' or '', - settings.ARCHIVES_SERVER, - settings.ARCHIVES_PORT, - url), + "http{0}://{1}:{2}{3}".format( + settings.ARCHIVES_PORT == 443 and "s" or "", + settings.ARCHIVES_SERVER, + settings.ARCHIVES_PORT, + url, + ), headers={ - 'Host': settings.ARCHIVES_HOST, + "Host": settings.ARCHIVES_HOST, }, timeout=settings.ARCHIVES_TIMEOUT, ) @@ -67,7 +73,7 @@ # We don't support gzipped or tar:ed patches or anything like # that at this point - just plain patches. 
- if mtype.startswith('text/x-diff'): + if mtype.startswith("text/x-diff"): a.ispatch = True else: a.ispatch = False diff --git a/tools/commitfest/update_archive_threads.py b/tools/commitfest/update_archive_threads.py index 9738f25a..b90348b5 100755 --- a/tools/commitfest/update_archive_threads.py +++ b/tools/commitfest/update_archive_threads.py @@ -6,31 +6,36 @@ # at least not all of them all the time... # +import logging import os import sys -import logging # Set up for accessing django -sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../../')) +sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "../../")) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgcommitfest.settings") -import django +import django # noqa: E402 + django.setup() -from django.db import connection +from django.db import connection # noqa: E402 -from pgcommitfest.commitfest.models import MailThread -from pgcommitfest.commitfest.ajax import refresh_single_thread +from pgcommitfest.commitfest.ajax import refresh_single_thread # noqa: E402 +from pgcommitfest.commitfest.models import MailThread # noqa: E402 if __name__ == "__main__": debug = "--debug" in sys.argv # Logging always done to stdout, but we can turn on/off how much - logging.basicConfig(format='%(asctime)s %(levelname)s: %(msg)s', - level=debug and logging.DEBUG or logging.INFO, - stream=sys.stdout) + logging.basicConfig( + format="%(asctime)s %(levelname)s: %(msg)s", + level=debug and logging.DEBUG or logging.INFO, + stream=sys.stdout, + ) logging.debug("Checking for updated mail threads in the archives") - for thread in MailThread.objects.filter(patches__commitfests__status__in=(1, 2, 3)).distinct(): + for thread in MailThread.objects.filter( + patches__commitfests__status__in=(1, 2, 3) + ).distinct(): logging.debug("Checking %s in the archives" % thread.messageid) refresh_single_thread(thread) diff --git a/tools/githook/pre-commit b/tools/githook/pre-commit index 
c1b36a0e..29d9ba52 100755 --- a/tools/githook/pre-commit +++ b/tools/githook/pre-commit @@ -1,38 +1,30 @@ #!/bin/sh -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD +if git rev-parse --verify HEAD >/dev/null 2>&1; then + against=HEAD else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 fi -FILES=$(git diff-index --name-only --diff-filter=ACMR --cached $against -- |egrep ".py$") +FILES=$(git diff-index --name-only --diff-filter=ACMR --cached $against -- | egrep ".py$") if [ "$FILES" != "" ]; then # We want to look at the staged version only, so we have to run it once for # each file. E=0 for F in ${FILES}; do - P=$(git show ":$F" | python3 -c "import sys; compile(sys.stdin.read(), '/dev/null', 'exec')") - if [ "$?" != "0" ]; then - echo "Errors in $F" - echo $P - E=1 - continue - fi + if ! git show ":$F" | ruff check - --quiet --stdin-filename "$F"; then + E=1 + fi - R=$(git show ":$F" | pycodestyle -) - if [ "$?" != "0" ]; then - echo "Errors in $F" - echo "$R" - E=1 - fi + if ! git show ":$F" | ruff format - --quiet --check; then + E=1 + echo "Formatting errors in $F, run 'make format'" + fi done if [ "$E" != "0" ]; then - exit 1 + exit 1 fi echo Basic python checks passed. fi - diff --git a/tools/mail/send_queued_mail.py b/tools/mail/send_queued_mail.py index 97577595..21b31ded 100755 --- a/tools/mail/send_queued_mail.py +++ b/tools/mail/send_queued_mail.py @@ -8,19 +8,20 @@ # by one. 
# -import sys import os import smtplib +import sys # Set up to run in django environment -sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../../')) +sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "../../")) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgcommitfest.settings") -import django +import django # noqa: E402 + django.setup() -from django.db import connection, transaction +from django.db import connection, transaction # noqa: E402 -from pgcommitfest.mailqueue.models import QueuedMail +from pgcommitfest.mailqueue.models import QueuedMail # noqa: E402 if __name__ == "__main__": # Grab advisory lock, if available. Lock id is just a random number @@ -38,7 +39,7 @@ # If it fails we'll throw an exception and just come back on the # next cron job. And local delivery should never fail... smtp = smtplib.SMTP("localhost") - smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode('utf-8')) + smtp.sendmail(m.sender, m.receiver, m.fullmsg.encode("utf-8")) smtp.close() m.delete() transaction.commit()