diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..b1eff40a
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,17 @@
+# top-most EditorConfig file
+root = true
+
+# basic rules for all files
+[*]
+end_of_line = lf
+insert_final_newline = true
+charset = utf-8
+trim_trailing_whitespace = true
+
+[*.{py,js}]
+indent_style = space
+indent_size = 4
+
+[*.html]
+indent_style = space
+indent_size = 1
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 00000000..6fa14369
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,29 @@
+name: CI
+on:
+ push:
+ branches: ["master"]
+ pull_request:
+
+jobs:
+ format:
+ runs-on: ubuntu-24.04
+ name: "Linting and formatting"
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Run ruff check
+ uses: astral-sh/ruff-action@v2
+
+ - name: Run ruff format --check
+ uses: astral-sh/ruff-action@v2
+ with:
+ args: "format --check"
+
+ - name: Setup Biome
+ uses: biomejs/setup-biome@v2
+ with:
+ version: latest
+
+ - name: Run Biome
+ run: biome ci .
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
new file mode 100644
index 00000000..ca2edde2
--- /dev/null
+++ b/.github/workflows/deploy.yaml
@@ -0,0 +1,16 @@
+name: Deploy
+
+on:
+ push:
+ branches:
+ - main
+ - prod
+
+jobs:
+ deployment:
+ runs-on: ubuntu-latest
+ environment: ${{ github.ref_name }}
+ steps:
+ - name: Trigger deploy
+ run: |
+ curl -fsS -X POST ${{ secrets.HOOKURL }} -H "X-Key: ${{ secrets.HOOKSECRET }}"
diff --git a/.github/workflows/transferdb.yaml b/.github/workflows/transferdb.yaml
new file mode 100644
index 00000000..b2884e96
--- /dev/null
+++ b/.github/workflows/transferdb.yaml
@@ -0,0 +1,13 @@
+name: TransferDB
+
+# Manually triggered only
+on: workflow_dispatch
+
+jobs:
+ transferdb:
+ runs-on: ubuntu-latest
+ environment: admin
+ steps:
+ - name: Trigger db transfer
+ run: |
+ curl -fsS -X POST ${{ secrets.TRANSFER_URL }} -H "X-Key: ${{ secrets.TRANSFER_KEY }}"
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..9b6ed017
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,11 @@
+format:
+ ruff format
+ npx @biomejs/biome format --write
+
+lint:
+ ruff check
+ npx @biomejs/biome check
+
+lint-fix:
+ ruff check --fix
+ npx @biomejs/biome check --fix
diff --git a/README.md b/README.md
index a379f53d..a92e865f 100644
--- a/README.md
+++ b/README.md
@@ -6,12 +6,14 @@ A commitfest is a collection of patches and reviews for a project and is part of
## The Application
-This is a Django 3.2 application backed by PostgreSQL and running on Python 3.x.
+This is a Django 4.2 application backed by PostgreSQL and running on Python 3.x.
## Getting Started
### Ubuntu instructions
+#### Install Dependencies / Configure Environment
+
First, prepare your development environment by installing pip, virtualenv, and postgresql-server-dev-X.Y.
```bash
@@ -45,12 +47,24 @@ be provided.
./manage.py migrate
```
-You'll need either a database dump of the actual server's data or else to create a superuser:
+#### Load data
+For a quick start, you can load some dummy data into the database. Here's how you do that:
+
+```
+./manage.py loaddata auth_data.json
+./manage.py loaddata commitfest_data.json
+```
+
+If you do this, the admin username and password are `admin` and `admin`.
+
+On the other hand, if you'd like to start from scratch instead, you can run the following command to create
+a superuser:
```bash
./manage.py createsuperuser
```
+#### Start application
Finally, you're ready to start the application:
```bash
@@ -62,10 +76,41 @@ admin interface, go back to the main interface. You're now logged in.
## Contributing
-Before committing make sure to install the git pre-commit hook to adhere to the
-codestyle.
+Code formatting and linting is done using [`ruff`] and [`biome`]. You can run
+formatting using `make format`. Linting can be done using `make lint` and
+automatic fixing of linting errors can be done using `make lint-fix`. CI checks
+that you adhere to these coding standards.
+
+You can install the git pre-commit hook to help you adhere to the codestyle:
```bash
ln -s ../../tools/githook/pre-commit .git/hooks/
+```
+
+[`ruff`]: https://docs.astral.sh/ruff/
+[`biome`]: https://biomejs.dev/
+### Discord
+
+If you want to discuss development of a fix/feature over chat, please join the
+`#commitfest-dev` channel on the ["PostgreSQL Hacking" Discord server][1].
+
+[1]: https://discord.gg/XZy2DXj7Wz
+
+### Staging server
+
+The staging server is available at:
+User and password for the HTTP authentication popup are both `pgtest`. The
+`main` branch is automatically deployed to the staging server. After some time
+on the staging server, commits will be merged into the `prod` branch, which
+automatically deploys to the production server.
+
+### Regenerating the database dump files
+
+If you'd like to regenerate the database dump files, you can run the following commands:
```
+./manage.py dumpdata auth --format=json --indent=4 --exclude=auth.permission > pgcommitfest/commitfest/fixtures/auth_data.json
+./manage.py dumpdata commitfest --format=json --indent=4 > pgcommitfest/commitfest/fixtures/commitfest_data.json
+```
+
+If you want to reload data from the dump files, you can run `drop owned by postgres;` in the `pgcommitfest` database first.
diff --git a/biome.json b/biome.json
new file mode 100644
index 00000000..1c3d6648
--- /dev/null
+++ b/biome.json
@@ -0,0 +1,36 @@
+{
+ "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
+ "vcs": {
+ "enabled": false,
+ "clientKind": "git",
+ "useIgnoreFile": false
+ },
+ "files": {
+ "ignoreUnknown": true,
+ "ignore": [],
+ "include": [
+ "media/commitfest/js/commitfest.js",
+ "media/commitfest/css/commitfest.css",
+ "biome.json"
+ ]
+ },
+ "formatter": {
+ "enabled": true,
+ "indentStyle": "space",
+ "indentWidth": 4
+ },
+ "organizeImports": {
+ "enabled": true
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true
+ }
+ },
+ "javascript": {
+ "formatter": {
+ "quoteStyle": "double"
+ }
+ }
+}
diff --git a/dev_requirements.txt b/dev_requirements.txt
index cedd81ce..66d7ae13 100644
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -1,3 +1,4 @@
-r requirements.txt
uwsgi
pycodestyle
+ruff
diff --git a/media/commitfest/css/commitfest.css b/media/commitfest/css/commitfest.css
index 07bfa102..fe6f6b80 100644
--- a/media/commitfest/css/commitfest.css
+++ b/media/commitfest/css/commitfest.css
@@ -4,47 +4,45 @@
/* For close button with float disabled */
.close-nofloat {
- float: none !important;
+ float: none !important;
}
/* General form styling */
.form-horizontal div.form-group {
- margin-bottom: 10px;
+ margin-bottom: 10px;
}
div.form-group div.controls ul {
- list-style-type: none;
- margin: 0px;
- padding: 0px;
+ list-style-type: none;
+ margin: 0px;
+ padding: 0px;
}
div.form-group div.controls ul li {
- display: inline;
+ display: inline;
}
div.form-group div.controls ul li label {
- display: inline;
- font-weight: normal;
- vertical-align:middle;
+ display: inline;
+ font-weight: normal;
+ vertical-align: middle;
}
div.form-group div.controls ul li label input {
- display: inline;
- vertical-align:middle;
+ display: inline;
+ vertical-align: middle;
}
-div.form-group div.controls input[type='checkbox'] {
- width: 10px;
+div.form-group div.controls input[type="checkbox"] {
+ width: 10px;
}
div.form-group div.controls input.threadpick-input {
- width: 80%;
- display: inline;
+ width: 80%;
+ display: inline;
}
-
-
/*
* Attach thread dialog
*/
#attachThreadListWrap.loading {
display: block;
- background: url('/media/commitfest/spinner.gif') no-repeat center;
+ background: url("/media/commitfest/spinner.gif") no-repeat center;
width: 124px;
height: 124px;
margin: 0 auto;
@@ -57,7 +55,7 @@ div.form-group div.controls input.threadpick-input {
* Annotate message dialog */
#annotateMessageBody.loading {
display: block;
- background: url('/media/commitfest/spinner.gif') no-repeat center;
+ background: url("/media/commitfest/spinner.gif") no-repeat center;
width: 124px;
height: 124px;
margin: 0 auto;
@@ -65,3 +63,21 @@ div.form-group div.controls input.threadpick-input {
#annotateMessageBody.loading * {
display: none;
}
+
+.cfbot-summary img {
+ margin-top: -3px;
+}
+
+.github-logo {
+ height: 20px;
+}
+
+.additions {
+ font-weight: bold;
+ color: green;
+}
+
+.deletions {
+ font-weight: bold;
+ color: red;
+}
diff --git a/media/commitfest/github-mark.svg b/media/commitfest/github-mark.svg
new file mode 100644
index 00000000..37fa923d
--- /dev/null
+++ b/media/commitfest/github-mark.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/media/commitfest/js/commitfest.js b/media/commitfest/js/commitfest.js
index 2f1eddae..99b3f021 100644
--- a/media/commitfest/js/commitfest.js
+++ b/media/commitfest/js/commitfest.js
@@ -1,308 +1,382 @@
function verify_reject() {
- return confirm('Are you sure you want to close this patch as Rejected?\n\nThis should only be done when a patch will never be applied - if more work is needed, it should instead be set to "Returned with Feedback" or "Moved to next CF".\n\nSo - are you sure?');
+ return confirm(
+ 'Are you sure you want to close this patch as Rejected?\n\nThis should only be done when a patch will never be applied - if more work is needed, it should instead be set to "Returned with Feedback" or "Moved to next CF".\n\nSo - are you sure?',
+ );
}
function verify_withdrawn() {
- return confirm('Are you sure you want to close this patch as Withdrawn?\n\nThis should only be done when the author voluntarily withdraws the patch.\n\nSo - are you sure?');
+ return confirm(
+ "Are you sure you want to close this patch as Withdrawn?\n\nThis should only be done when the author voluntarily withdraws the patch.\n\nSo - are you sure?",
+ );
}
function verify_returned() {
- return confirm('Are you sure you want to close this patch as Returned with Feedback?\n\nThis should be done if the patch is expected to be finished at some future time, but not necessarily in the next commitfest. If work is undergoing and expected in the next commitfest, it should instead be set to "Moved to next CF".\n\nSo - are you sure?');
+ return confirm(
+ 'Are you sure you want to close this patch as Returned with Feedback?\n\nThis should be done if the patch is expected to be finished at some future time, but not necessarily in the next commitfest. If work is undergoing and expected in the next commitfest, it should instead be set to "Moved to next CF".\n\nSo - are you sure?',
+ );
}
function verify_next() {
- return confirm('Are you sure you want to move this patch to the next commitfest?\n\nThis means the patch will be marked as closed in this commitfest, but will automatically be moved to the next one. If no further work is expected on this patch, it should be closed with "Rejected" or "Returned with Feedback" instead.\n\nSo - are you sure?');
+ return confirm(
+ 'Are you sure you want to move this patch to the next commitfest?\n\nThis means the patch will be marked as closed in this commitfest, but will automatically be moved to the next one. If no further work is expected on this patch, it should be closed with "Rejected" or "Returned with Feedback" instead.\n\nSo - are you sure?',
+ );
}
function findLatestThreads() {
- $('#attachThreadListWrap').addClass('loading');
- $('#attachThreadSearchButton').addClass('disabled');
- $.get('/ajax/getThreads/', {
- 's': $('#attachThreadSearchField').val(),
- 'a': $('#attachThreadAttachOnly').val(),
- }).success(function(data) {
- sel = $('#attachThreadList');
- sel.find('option').remove();
- $.each(data, function(m,i) {
- sel.append($('').text(i.from + ': ' + i.subj + ' (' + i.date + ')').val(i.msgid));
- });
- }).always(function() {
- $('#attachThreadListWrap').removeClass('loading');
- $('#attachThreadSearchButton').removeClass('disabled');
- attachThreadChanged();
- });
- return false;
+ $("#attachThreadListWrap").addClass("loading");
+ $("#attachThreadSearchButton").addClass("disabled");
+ $.get("/ajax/getThreads/", {
+ s: $("#attachThreadSearchField").val(),
+ a: $("#attachThreadAttachOnly").val(),
+ })
+ .success((data) => {
+ sel = $("#attachThreadList");
+ sel.find("option").remove();
+ $.each(data, (m, i) => {
+ sel.append(
+ $("")
+ .text(`${i.from}: ${i.subj} (${i.date})`)
+ .data("subject", i.subj)
+ .val(i.msgid),
+ );
+ });
+ })
+ .always(() => {
+ $("#attachThreadListWrap").removeClass("loading");
+ $("#attachThreadSearchButton").removeClass("disabled");
+ attachThreadChanged();
+ });
+ return false;
}
function browseThreads(attachfunc, closefunc) {
- $('#attachThreadList').find('option').remove();
- $('#attachThreadMessageId').val('');
- $('#attachModal').off('hidden.bs.modal');
- $('#attachModal').on('hidden.bs.modal', function(e) {
- if (closefunc) closefunc();
- });
- $('#attachModal').modal();
- findLatestThreads();
-
- $('#doAttachThreadButton').unbind('click');
- $('#doAttachThreadButton').click(function() {
- msgid = $('#attachThreadMessageId').val();
- if (!msgid || msgid == '') {
- msgid = $('#attachThreadList').val();
- if (!msgid) return;
- }
+ $("#attachThreadList").find("option").remove();
+ $("#attachThreadMessageId").val("");
+ $("#attachModal").off("hidden.bs.modal");
+ $("#attachModal").on("hidden.bs.modal", (e) => {
+ if (closefunc) closefunc();
+ });
+ $("#attachModal").modal();
+ findLatestThreads();
- $('#attachThreadListWrap').addClass('loading');
- $('#attachThreadSearchButton').addClass('disabled');
- $('#attachThreadButton').addClass('disabled');
- if (attachfunc(msgid)) {
- $('#attachModal').modal('hide');
- }
- $('#attachThreadListWrap').removeClass('loading');
- $('#attachThreadSearchButton').removeClass('disabled');
- attachThreadChanged();
- });
+ $("#doAttachThreadButton").unbind("click");
+ $("#doAttachThreadButton").click(() => {
+ msgid = $("#attachThreadMessageId").val();
+ if (!msgid || msgid === "") {
+ msgid = $("#attachThreadList").val();
+ if (!msgid) return;
+ subject = $("#attachThreadList option:selected").data("subject");
+ subject = subject.replace(/\bre: /gi, "");
+ subject = subject.replace(/\bfwd: /gi, "");
+ // Strips [PATCH], [POC], etc. prefixes
+ subject = subject.replace(/\[\w+\]: /gi, "");
+ subject = subject.replace(/\[\w+\] /gi, "");
+ }
+ $("#attachThreadListWrap").addClass("loading");
+ $("#attachThreadSearchButton").addClass("disabled");
+ $("#attachThreadButton").addClass("disabled");
+ if (attachfunc(msgid, subject)) {
+ $("#attachModal").modal("hide");
+ }
+ $("#attachThreadListWrap").removeClass("loading");
+ $("#attachThreadSearchButton").removeClass("disabled");
+ attachThreadChanged();
+ });
}
function attachThread(cfid, patchid, closefunc) {
- browseThreads(function(msgid) {
- doAttachThread(cfid, patchid, msgid, !closefunc);
- if (closefunc) {
- /* We don't really care about closing it, we just reload immediately */
- closefunc();
- }
- },
- function() {
- if (closefunc) closefunc();
- });
+ browseThreads(
+ (msgid) => {
+ doAttachThread(cfid, patchid, msgid, !closefunc);
+ if (closefunc) {
+ /* We don't really care about closing it, we just reload immediately */
+ closefunc();
+ }
+ },
+ () => {
+ if (closefunc) closefunc();
+ },
+ );
}
function detachThread(cfid, patchid, msgid) {
- if (confirm('Are you sure you want to detach the thread with messageid "' + msgid + '" from this patch?')) {
- $.post('/ajax/detachThread/', {
- 'cf': cfid,
- 'p': patchid,
- 'msg': msgid,
- }).success(function(data) {
- location.reload();
- }).fail(function(data) {
- alert('Failed to detach thread!');
- });
- }
+ if (
+ confirm(
+ `Are you sure you want to detach the thread with messageid "${msgid}" from this patch?`,
+ )
+ ) {
+ $.post("/ajax/detachThread/", {
+ cf: cfid,
+ p: patchid,
+ msg: msgid,
+ })
+ .success((data) => {
+ location.reload();
+ })
+ .fail((data) => {
+ alert("Failed to detach thread!");
+ });
+ }
}
function attachThreadChanged() {
- if ($('#attachThreadList').val() || $('#attachThreadMessageId').val()) {
- $('#doAttachThreadButton').removeClass('disabled');
- }
- else {
- $('#doAttachThreadButton').addClass('disabled');
- }
+ if ($("#attachThreadList").val() || $("#attachThreadMessageId").val()) {
+ $("#doAttachThreadButton").removeClass("disabled");
+ } else {
+ $("#doAttachThreadButton").addClass("disabled");
+ }
}
function doAttachThread(cfid, patchid, msgid, reloadonsuccess) {
- $.post('/ajax/attachThread/', {
- 'cf': cfid,
- 'p': patchid,
- 'msg': msgid,
- }).success(function(data) {
- if (data != 'OK') {
- alert(data);
- }
- if (reloadonsuccess)
- location.reload();
- return true;
- }).fail(function(data) {
- if (data.status == 404) {
- alert('Message with messageid ' + msgid + ' not found');
- }
- else if (data.status == 503) {
- alert('Failed to attach thread: ' + data.responseText);
- }
- else {
- alert('Failed to attach thread: ' + data.statusText);
- }
- return false;
- });
+ $.post("/ajax/attachThread/", {
+ cf: cfid,
+ p: patchid,
+ msg: msgid,
+ })
+ .success((data) => {
+ if (data !== "OK") {
+ alert(data);
+ }
+ if (reloadonsuccess) location.reload();
+ return true;
+ })
+ .fail((data) => {
+ if (data.status === 404) {
+ alert(`Message with messageid ${msgid} not found`);
+ } else if (data.status === 503) {
+ alert(`Failed to attach thread: ${data.responseText}`);
+ } else {
+ alert(`Failed to attach thread: ${data.statusText}`);
+ }
+ return false;
+ });
}
function updateAnnotationMessages(threadid) {
- $('#annotateMessageBody').addClass('loading');
- $('#doAnnotateMessageButton').addClass('disabled');
- $.get('/ajax/getMessages', {
- 't': threadid,
- }).success(function(data) {
- sel = $('#annotateMessageList')
- sel.find('option').remove();
- sel.append('');
- $.each(data, function(i,m) {
- sel.append('');
- });
- }).always(function() {
- $('#annotateMessageBody').removeClass('loading');
- });
+ $("#annotateMessageBody").addClass("loading");
+ $("#doAnnotateMessageButton").addClass("disabled");
+ $.get("/ajax/getMessages", {
+ t: threadid,
+ })
+ .success((data) => {
+ sel = $("#annotateMessageList");
+ sel.find("option").remove();
+ sel.append('');
+ $.each(data, (i, m) => {
+ sel.append(
+ ``,
+ );
+ });
+ })
+ .always(() => {
+ $("#annotateMessageBody").removeClass("loading");
+ });
}
function addAnnotation(threadid) {
- $('#annotateThreadList').find('option').remove();
- $('#annotateMessage').val('');
- $('#annotateMsgId').val('');
- $('#annotateModal').modal();
- $('#annotateThreadList').focus();
+ $("#annotateThreadList").find("option").remove();
+ $("#annotateMessage").val("");
+ $("#annotateMsgId").val("");
+ $("#annotateModal").modal();
+ $("#annotateThreadList").focus();
updateAnnotationMessages(threadid);
- $('#doAnnotateMessageButton').unbind('click');
- $('#doAnnotateMessageButton').click(function() {
- var msg = $('#annotateMessage').val();
- if (msg.length >= 500) {
- alert('Maximum length for an annotation is 500 characters.\nYou should probably post an actual message in the thread!');
- return;
- }
- $('#doAnnotateMessageButton').addClass('disabled');
- $('#annotateMessageBody').addClass('loading');
- $.post('/ajax/annotateMessage/', {
- 't': threadid,
- 'msgid': $.trim($('#annotateMsgId').val()),
- 'msg': msg
- }).success(function(data) {
- if (data != 'OK') {
- alert(data);
- $('#annotateMessageBody').removeClass('loading');
- }
- else {
- $('#annotateModal').modal('hide');
- location.reload();
- }
- }).fail(function(data) {
- alert('Failed to annotate message');
- $('#annotateMessageBody').removeClass('loading');
- });
+ $("#doAnnotateMessageButton").unbind("click");
+ $("#doAnnotateMessageButton").click(() => {
+ const msg = $("#annotateMessage").val();
+ if (msg.length >= 500) {
+ alert(
+ "Maximum length for an annotation is 500 characters.\nYou should probably post an actual message in the thread!",
+ );
+ return;
+ }
+ $("#doAnnotateMessageButton").addClass("disabled");
+ $("#annotateMessageBody").addClass("loading");
+ $.post("/ajax/annotateMessage/", {
+ t: threadid,
+ msgid: $.trim($("#annotateMsgId").val()),
+ msg: msg,
+ })
+ .success((data) => {
+ if (data !== "OK") {
+ alert(data);
+ $("#annotateMessageBody").removeClass("loading");
+ } else {
+ $("#annotateModal").modal("hide");
+ location.reload();
+ }
+ })
+ .fail((data) => {
+ alert("Failed to annotate message");
+ $("#annotateMessageBody").removeClass("loading");
+ });
});
}
function annotateMsgPicked() {
- var val = $('#annotateMessageList').val();
+ const val = $("#annotateMessageList").val();
if (val) {
- $('#annotateMsgId').val(val);
- annotateChanged();
+ $("#annotateMsgId").val(val);
+ annotateChanged();
}
}
function annotateChanged() {
/* Enable/disable the annotate button */
- if ($('#annotateMessage').val() != '' && $('#annotateMsgId').val()) {
- $('#doAnnotateMessageButton').removeClass('disabled');
- }
- else {
- $('#doAnnotateMessageButton').addClass('disabled');
+ if ($("#annotateMessage").val() !== "" && $("#annotateMsgId").val()) {
+ $("#doAnnotateMessageButton").removeClass("disabled");
+ } else {
+ $("#doAnnotateMessageButton").addClass("disabled");
}
}
function deleteAnnotation(annid) {
- if (confirm('Are you sure you want to delete this annotation?')) {
- $.post('/ajax/deleteAnnotation/', {
- 'id': annid,
- }).success(function(data) {
- location.reload();
- }).fail(function(data) {
- alert('Failed to delete annotation!');
- });
+ if (confirm("Are you sure you want to delete this annotation?")) {
+ $.post("/ajax/deleteAnnotation/", {
+ id: annid,
+ })
+ .success((data) => {
+ location.reload();
+ })
+ .fail((data) => {
+ alert("Failed to delete annotation!");
+ });
}
}
function flagCommitted(committer) {
- $('#commitModal').modal();
- $('#committerSelect').val(committer);
- $('#doCommitButton').unbind('click');
- $('#doCommitButton').click(function() {
- var c = $('#committerSelect').val();
- if (!c) {
- alert('You need to select a committer before you can mark a patch as committed!');
- return;
- }
- document.location.href='close/committed/?c=' + c;
- });
- return false;
+ $("#commitModal").modal();
+ $("#committerSelect").val(committer);
+ $("#doCommitButton").unbind("click");
+ $("#doCommitButton").click(() => {
+ const c = $("#committerSelect").val();
+ if (!c) {
+ alert(
+ "You need to select a committer before you can mark a patch as committed!",
+ );
+ return;
+ }
+ document.location.href = `close/committed/?c=${c}`;
+ });
+ return false;
}
-
function sortpatches(sortby) {
- $('#id_sortkey').val(sortby);
+ let sortkey = $('#id_sortkey').val()
+ if (sortkey == sortby) {
+ $('#id_sortkey').val(-sortby)
+ } else if(-sortkey == sortby){
+ $('#id_sortkey').val(0)
+ } else {
+ $('#id_sortkey').val(sortby);
+ }
$('#filterform').submit();
- return false;
+ return false;
}
function toggleButtonCollapse(buttonId, collapseId) {
- $('#' + buttonId).button('toggle');
- $('#' + collapseId).toggleClass('in')
+ $(`#${buttonId}`).button("toggle");
+ $(`#${collapseId}`).toggleClass("in");
}
function togglePatchFilterButton(buttonId, collapseId) {
- /* Figure out if we are collapsing it */
- if ($('#' + collapseId).hasClass('in')) {
- /* Go back to ourselves without a querystring to reset the form, unless it's already empty */
- if (document.location.href.indexOf('?') > -1) {
- document.location.href = '.';
- return;
- }
- }
+ /* Figure out if we are collapsing it */
+ if ($(`#${collapseId}`).hasClass("in")) {
+ /* Go back to ourselves without a querystring to reset the form, unless it's already empty */
+ if (document.location.href.indexOf("?") > -1) {
+ document.location.href = ".";
+ return;
+ }
+ }
- toggleButtonCollapse(buttonId, collapseId);
+ toggleButtonCollapse(buttonId, collapseId);
}
-
/*
* Upstream user search dialog
*/
function search_and_store_user() {
- $('#doSelectUserButton').unbind('click');
- $('#doSelectUserButton').click(function() {
- if (!$('#searchUserList').val()) { return false; }
+ $("#doSelectUserButton").unbind("click");
+ $("#doSelectUserButton").click(() => {
+ if (!$("#searchUserList").val()) {
+ return false;
+ }
- /* Create this user locally */
- $.get('/ajax/importUser/', {
- 'u': $('#searchUserList').val(),
- }).success(function(data) {
- if (data == 'OK') {
- alert('User imported!');
- $('#searchUserModal').modal('hide');
- } else {
- alert('Failed to import user: ' + data);
- }
- }).fail(function(data, statustxt) {
- alert('Failed to import user: ' + statustxt);
- });
+ /* Create this user locally */
+ $.get("/ajax/importUser/", {
+ u: $("#searchUserList").val(),
+ })
+ .success((data) => {
+ if (data === "OK") {
+ alert("User imported!");
+ $("#searchUserModal").modal("hide");
+ } else {
+ alert(`Failed to import user: ${data}`);
+ }
+ })
+ .fail((data, statustxt) => {
+ alert(`Failed to import user: ${statustxt}`);
+ });
- return false;
+ return false;
});
- $('#searchUserModal').modal();
+ $("#searchUserModal").modal();
}
function findUsers() {
- if (!$('#searchUserSearchField').val()) {
- alert('No search term specified');
- return false;
+ if (!$("#searchUserSearchField").val()) {
+ alert("No search term specified");
+ return false;
}
- $('#searchUserListWrap').addClass('loading');
- $('#searchUserSearchButton').addClass('disabled');
- $.get('/ajax/searchUsers/', {
- 's': $('#searchUserSearchField').val(),
- }).success(function(data) {
- sel = $('#searchUserList');
- sel.find('option').remove();
- $.each(data, function(i,u) {
- sel.append('');
+ $("#searchUserListWrap").addClass("loading");
+ $("#searchUserSearchButton").addClass("disabled");
+ $.get("/ajax/searchUsers/", {
+ s: $("#searchUserSearchField").val(),
+ })
+ .success((data) => {
+ sel = $("#searchUserList");
+ sel.find("option").remove();
+ $.each(data, (i, u) => {
+ sel.append(
+ ``,
+ );
+ });
+ })
+ .always(() => {
+ $("#searchUserListWrap").removeClass("loading");
+ $("#searchUserSearchButton").removeClass("disabled");
+ searchUserListChanged();
});
- }).always(function() {
- $('#searchUserListWrap').removeClass('loading');
- $('#searchUserSearchButton').removeClass('disabled');
- searchUserListChanged();
- });
- return false;
+ return false;
}
function searchUserListChanged() {
- if ($('#searchUserList').val()) {
- $('#doSelectUserButton').removeClass('disabled');
- }
- else {
- $('#doSelectUserButton').addClass('disabled');
- }
+ if ($("#searchUserList").val()) {
+ $("#doSelectUserButton").removeClass("disabled");
+ } else {
+ $("#doSelectUserButton").addClass("disabled");
+ }
+}
+
+function addGitCheckoutToClipboard(patchId) {
+ navigator.clipboard.writeText(`git remote add commitfest https://github.com/postgresql-cfbot/postgresql.git
+git fetch commitfest cf/${patchId}
+git checkout commitfest/cf/${patchId}
+`);
}
+
+/* Build our button callbacks */
+$(document).ready(() => {
+ $("button.attachThreadButton").each((i, o) => {
+ const b = $(o);
+ b.click(() => {
+ $("#attachThreadAttachOnly").val("1");
+ browseThreads((msgid, subject) => {
+ b.prev().val(msgid);
+ const description_field = $("#id_name");
+ if (description_field.val() === "") {
+ description_field.val(subject);
+ }
+ return true;
+ });
+ return false;
+ });
+ });
+});
diff --git a/media/commitfest/needs_rebase_success.svg b/media/commitfest/needs_rebase_success.svg
new file mode 100644
index 00000000..7f4113ff
--- /dev/null
+++ b/media/commitfest/needs_rebase_success.svg
@@ -0,0 +1,4 @@
+
\ No newline at end of file
diff --git a/media/commitfest/new_failure.svg b/media/commitfest/new_failure.svg
new file mode 100644
index 00000000..ff3012d0
--- /dev/null
+++ b/media/commitfest/new_failure.svg
@@ -0,0 +1,5 @@
+
\ No newline at end of file
diff --git a/media/commitfest/new_success.svg b/media/commitfest/new_success.svg
new file mode 100644
index 00000000..a0d9b7c4
--- /dev/null
+++ b/media/commitfest/new_success.svg
@@ -0,0 +1,4 @@
+
\ No newline at end of file
diff --git a/media/commitfest/old_failure.svg b/media/commitfest/old_failure.svg
new file mode 100644
index 00000000..9d91d6c0
--- /dev/null
+++ b/media/commitfest/old_failure.svg
@@ -0,0 +1,5 @@
+
\ No newline at end of file
diff --git a/media/commitfest/old_success.svg b/media/commitfest/old_success.svg
new file mode 100644
index 00000000..2de4117e
--- /dev/null
+++ b/media/commitfest/old_success.svg
@@ -0,0 +1,4 @@
+
\ No newline at end of file
diff --git a/media/commitfest/running.svg b/media/commitfest/running.svg
new file mode 100644
index 00000000..a137d410
--- /dev/null
+++ b/media/commitfest/running.svg
@@ -0,0 +1,4 @@
+
diff --git a/media/commitfest/waiting_to_start.svg b/media/commitfest/waiting_to_start.svg
new file mode 100644
index 00000000..efd371d4
--- /dev/null
+++ b/media/commitfest/waiting_to_start.svg
@@ -0,0 +1,3 @@
+
\ No newline at end of file
diff --git a/pgcommitfest/auth.py b/pgcommitfest/auth.py
index 9343fc0f..af605119 100644
--- a/pgcommitfest/auth.py
+++ b/pgcommitfest/auth.py
@@ -24,27 +24,27 @@
# directory that's processed before the default django.contrib.admin)
#
-from django.http import HttpResponse, HttpResponseRedirect
-from django.views.decorators.csrf import csrf_exempt
-from django.contrib.auth.models import User
-from django.contrib.auth.backends import ModelBackend
+from django.conf import settings
from django.contrib.auth import login as django_login
from django.contrib.auth import logout as django_logout
-from django.dispatch import Signal
+from django.contrib.auth.backends import ModelBackend
+from django.contrib.auth.models import User
from django.db import transaction
-from django.conf import settings
+from django.dispatch import Signal
+from django.http import HttpResponse, HttpResponseRedirect
+from django.views.decorators.csrf import csrf_exempt
import base64
+import hmac
import json
import socket
-import hmac
-from urllib.parse import urlencode, parse_qs
+import time
+from urllib.parse import parse_qs, urlencode
+
import requests
+from Cryptodome import Random
from Cryptodome.Cipher import AES
from Cryptodome.Hash import SHA
-from Cryptodome import Random
-import time
-
# This signal fires when a user is created based on data from upstream.
auth_user_created_from_upstream = Signal()
@@ -66,24 +66,32 @@ def authenticate(self, username=None, password=None):
# Two regular django views to interact with the login system
####
+
# Handle login requests by sending them off to the main site
def login(request):
- if 'next' in request.GET:
+ if "next" in request.GET:
# Put together an url-encoded dict of parameters we're getting back,
# including a small nonce at the beginning to make sure it doesn't
# encrypt the same way every time.
- s = "t=%s&%s" % (int(time.time()), urlencode({'r': request.GET['next']}))
+ s = "t=%s&%s" % (int(time.time()), urlencode({"r": request.GET["next"]}))
# Now encrypt it
r = Random.new()
iv = r.read(16)
- encryptor = AES.new(SHA.new(settings.SECRET_KEY.encode('ascii')).digest()[:16], AES.MODE_CBC, iv)
- cipher = encryptor.encrypt(s.encode('ascii') + b' ' * (16 - (len(s) % 16))) # pad to 16 bytes
-
- return HttpResponseRedirect("%s?d=%s$%s" % (
- settings.PGAUTH_REDIRECT,
- base64.b64encode(iv, b"-_").decode('utf8'),
- base64.b64encode(cipher, b"-_").decode('utf8'),
- ))
+ encryptor = AES.new(
+ SHA.new(settings.SECRET_KEY.encode("ascii")).digest()[:16], AES.MODE_CBC, iv
+ )
+ cipher = encryptor.encrypt(
+ s.encode("ascii") + b" " * (16 - (len(s) % 16))
+ ) # pad to 16 bytes
+
+ return HttpResponseRedirect(
+ "%s?d=%s$%s"
+ % (
+ settings.PGAUTH_REDIRECT,
+ base64.b64encode(iv, b"-_").decode("utf8"),
+ base64.b64encode(cipher, b"-_").decode("utf8"),
+ )
+ )
else:
return HttpResponseRedirect(settings.PGAUTH_REDIRECT)
@@ -99,21 +107,27 @@ def logout(request):
# Receive an authentication response from the main website and try
# to log the user in.
def auth_receive(request):
- if 's' in request.GET and request.GET['s'] == "logout":
+ if "s" in request.GET and request.GET["s"] == "logout":
# This was a logout request
- return HttpResponseRedirect('/')
+ return HttpResponseRedirect("/")
- if 'i' not in request.GET:
+ if "i" not in request.GET:
return HttpResponse("Missing IV in url!", status=400)
- if 'd' not in request.GET:
+ if "d" not in request.GET:
return HttpResponse("Missing data in url!", status=400)
# Set up an AES object and decrypt the data we received
try:
- decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY),
- AES.MODE_CBC,
- base64.b64decode(str(request.GET['i']), "-_"))
- s = decryptor.decrypt(base64.b64decode(str(request.GET['d']), "-_")).rstrip(b' ').decode('utf8')
+ decryptor = AES.new(
+ base64.b64decode(settings.PGAUTH_KEY),
+ AES.MODE_CBC,
+ base64.b64decode(str(request.GET["i"]), "-_"),
+ )
+ s = (
+ decryptor.decrypt(base64.b64decode(str(request.GET["d"]), "-_"))
+ .rstrip(b" ")
+ .decode("utf8")
+ )
except UnicodeDecodeError:
return HttpResponse("Badly encoded data found", 400)
except Exception:
@@ -126,23 +140,23 @@ def auth_receive(request):
return HttpResponse("Invalid encrypted data received.", status=400)
# Check the timestamp in the authentication
- if (int(data['t'][0]) < time.time() - 10):
+ if int(data["t"][0]) < time.time() - 10:
return HttpResponse("Authentication token too old.", status=400)
# Update the user record (if any)
try:
- user = User.objects.get(username=data['u'][0])
+ user = User.objects.get(username=data["u"][0])
# User found, let's see if any important fields have changed
changed = []
- if user.first_name != data['f'][0]:
- user.first_name = data['f'][0]
- changed.append('first_name')
- if user.last_name != data['l'][0]:
- user.last_name = data['l'][0]
- changed.append('last_name')
- if user.email != data['e'][0]:
- user.email = data['e'][0]
- changed.append('email')
+ if user.first_name != data["f"][0]:
+ user.first_name = data["f"][0]
+ changed.append("first_name")
+ if user.last_name != data["l"][0]:
+ user.last_name = data["l"][0]
+ changed.append("last_name")
+ if user.email != data["e"][0]:
+ user.email = data["e"][0]
+ changed.append("email")
if changed:
user.save(update_fields=changed)
except User.DoesNotExist:
@@ -152,8 +166,9 @@ def auth_receive(request):
# the database with a different userid. Instead of trying to
# somehow fix that live, give a proper error message and
# have somebody look at it manually.
- if User.objects.filter(email=data['e'][0]).exists():
- return HttpResponse("""A user with email %s already exists, but with
+ if User.objects.filter(email=data["e"][0]).exists():
+ return HttpResponse(
+ """A user with email %s already exists, but with
a different username than %s.
This is almost certainly caused by some legacy data in our database.
@@ -162,26 +177,30 @@ def auth_receive(request):
for you.
We apologize for the inconvenience.
-""" % (data['e'][0], data['u'][0]), content_type='text/plain')
-
- if getattr(settings, 'PGAUTH_CREATEUSER_CALLBACK', None):
- res = getattr(settings, 'PGAUTH_CREATEUSER_CALLBACK')(
- data['u'][0],
- data['e'][0],
- ['f'][0],
- data['l'][0],
+"""
+ % (data["e"][0], data["u"][0]),
+ content_type="text/plain",
+ )
+
+ if getattr(settings, "PGAUTH_CREATEUSER_CALLBACK", None):
+ res = getattr(settings, "PGAUTH_CREATEUSER_CALLBACK")(
+ data["u"][0],
+ data["e"][0],
+ data["f"][0],
+ data["l"][0],
)
# If anything is returned, we'll return that as our result.
# If None is returned, it means go ahead and create the user.
if res:
return res
- user = User(username=data['u'][0],
- first_name=data['f'][0],
- last_name=data['l'][0],
- email=data['e'][0],
- password='setbypluginnotasha1',
- )
+ user = User(
+ username=data["u"][0],
+ first_name=data["f"][0],
+ last_name=data["l"][0],
+ email=data["e"][0],
+ password="setbypluginnotasha1",
+ )
user.save()
auth_user_created_from_upstream.send(sender=auth_receive, user=user)
@@ -193,39 +212,45 @@ def auth_receive(request):
django_login(request, user)
# Signal that we have information about this user
- auth_user_data_received.send(sender=auth_receive, user=user, userdata={
- 'secondaryemails': data['se'][0].split(',') if 'se' in data else []
- })
+ auth_user_data_received.send(
+ sender=auth_receive,
+ user=user,
+ userdata={"secondaryemails": data["se"][0].split(",") if "se" in data else []},
+ )
# Finally, check of we have a data package that tells us where to
# redirect the user.
- if 'd' in data:
- (ivs, datas) = data['d'][0].split('$')
- decryptor = AES.new(SHA.new(settings.SECRET_KEY.encode('ascii')).digest()[:16],
- AES.MODE_CBC,
- base64.b64decode(ivs, b"-_"))
- s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b' ').decode('utf8')
+ if "d" in data:
+ (ivs, datas) = data["d"][0].split("$")
+ decryptor = AES.new(
+ SHA.new(settings.SECRET_KEY.encode("ascii")).digest()[:16],
+ AES.MODE_CBC,
+ base64.b64decode(ivs, b"-_"),
+ )
+ s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b" ").decode("utf8")
try:
rdata = parse_qs(s, strict_parsing=True)
except ValueError:
return HttpResponse("Invalid encrypted data received.", status=400)
- if 'r' in rdata:
+ if "r" in rdata:
# Redirect address
- return HttpResponseRedirect(rdata['r'][0])
+ return HttpResponseRedirect(rdata["r"][0])
# No redirect specified, see if we have it in our settings
- if hasattr(settings, 'PGAUTH_REDIRECT_SUCCESS'):
+ if hasattr(settings, "PGAUTH_REDIRECT_SUCCESS"):
return HttpResponseRedirect(settings.PGAUTH_REDIRECT_SUCCESS)
- return HttpResponse("Authentication successful, but don't know where to redirect!", status=500)
+ return HttpResponse(
+ "Authentication successful, but don't know where to redirect!", status=500
+ )
# Receive API calls from upstream, such as push changes to users
@csrf_exempt
def auth_api(request):
- if 'X-pgauth-sig' not in request.headers:
+ if "X-pgauth-sig" not in request.headers:
return HttpResponse("Missing signature header!", status=400)
try:
- sig = base64.b64decode(request.headers['X-pgauth-sig'])
+ sig = base64.b64decode(request.headers["X-pgauth-sig"])
except Exception:
return HttpResponse("Invalid signature header!", status=400)
@@ -233,7 +258,7 @@ def auth_api(request):
h = hmac.digest(
base64.b64decode(settings.PGAUTH_KEY),
msg=request.body,
- digest='sha512',
+ digest="sha512",
)
if not hmac.compare_digest(h, sig):
return HttpResponse("Invalid signature!", status=401)
@@ -261,26 +286,38 @@ def _conditionally_update_record(rectype, recordkey, structkey, fieldmap, struct
return None
# Process the received structure
- if pushstruct.get('type', None) == 'update':
+ if pushstruct.get("type", None) == "update":
# Process updates!
with transaction.atomic():
- for u in pushstruct.get('users', []):
+ for u in pushstruct.get("users", []):
user = _conditionally_update_record(
User,
- 'username', 'username',
+ "username",
+ "username",
{
- 'firstname': 'first_name',
- 'lastname': 'last_name',
- 'email': 'email',
+ "firstname": "first_name",
+ "lastname": "last_name",
+ "email": "email",
},
u,
)
# Signal that we have information about this user (only if it exists)
if user:
- auth_user_data_received.send(sender=auth_api, user=user, userdata={
- k: u[k] for k in u.keys() if k not in ['firstname', 'lastname', 'email', ]
- })
+ auth_user_data_received.send(
+ sender=auth_api,
+ user=user,
+ userdata={
+ k: u[k]
+ for k in u.keys()
+ if k
+ not in [
+ "firstname",
+ "lastname",
+ "email",
+ ]
+ },
+ )
return HttpResponse("OK", status=200)
@@ -297,24 +334,24 @@ def user_search(searchterm=None, userid=None):
# 10 seconds is already quite long.
socket.setdefaulttimeout(10)
if userid:
- q = {'u': userid}
+ q = {"u": userid}
else:
- q = {'s': searchterm}
+ q = {"s": searchterm}
r = requests.get(
- '{0}search/'.format(settings.PGAUTH_REDIRECT),
+ "{0}search/".format(settings.PGAUTH_REDIRECT),
params=q,
)
if r.status_code != 200:
return []
- (ivs, datas) = r.text.encode('utf8').split(b'&')
+ (ivs, datas) = r.text.encode("utf8").split(b"&")
# Decryption time
- decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY),
- AES.MODE_CBC,
- base64.b64decode(ivs, "-_"))
- s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b' ').decode('utf8')
+ decryptor = AES.new(
+ base64.b64decode(settings.PGAUTH_KEY), AES.MODE_CBC, base64.b64decode(ivs, "-_")
+ )
+ s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(b" ").decode("utf8")
j = json.loads(s)
return j
@@ -324,22 +361,24 @@ def user_search(searchterm=None, userid=None):
def subscribe_to_user_changes(userid):
socket.setdefaulttimeout(10)
- body = json.dumps({
- 'u': userid,
- })
+ body = json.dumps(
+ {
+ "u": userid,
+ }
+ )
h = hmac.digest(
base64.b64decode(settings.PGAUTH_KEY),
- msg=bytes(body, 'utf-8'),
- digest='sha512',
+ msg=bytes(body, "utf-8"),
+ digest="sha512",
)
# Ignore the result code, just post it
requests.post(
- '{0}subscribe/'.format(settings.PGAUTH_REDIRECT),
+ "{0}subscribe/".format(settings.PGAUTH_REDIRECT),
data=body,
headers={
- 'X-pgauth-sig': base64.b64encode(h),
+ "X-pgauth-sig": base64.b64encode(h),
},
)
@@ -359,15 +398,15 @@ def user_import(uid):
u = u[0]
- if User.objects.filter(username=u['u']).exists():
+ if User.objects.filter(username=u["u"]).exists():
raise Exception("User already exists")
u = User(
- username=u['u'],
- first_name=u['f'],
- last_name=u['l'],
- email=u['e'],
- password='setbypluginnotsha1',
+ username=u["u"],
+ first_name=u["f"],
+ last_name=u["l"],
+ email=u["e"],
+ password="setbypluginnotsha1",
)
u.save()
diff --git a/pgcommitfest/commitfest/admin.py b/pgcommitfest/commitfest/admin.py
index 0f7ffda8..8c8d62e5 100644
--- a/pgcommitfest/commitfest/admin.py
+++ b/pgcommitfest/commitfest/admin.py
@@ -1,10 +1,22 @@
from django.contrib import admin
-from .models import *
+from .models import (
+ CfbotBranch,
+ CfbotTask,
+ CommitFest,
+ Committer,
+ MailThread,
+ MailThreadAttachment,
+ Patch,
+ PatchHistory,
+ PatchOnCommitFest,
+ TargetVersion,
+ Topic,
+)
class CommitterAdmin(admin.ModelAdmin):
- list_display = ('user', 'active')
+ list_display = ("user", "active")
class PatchOnCommitFestInline(admin.TabularInline):
@@ -14,11 +26,16 @@ class PatchOnCommitFestInline(admin.TabularInline):
class PatchAdmin(admin.ModelAdmin):
inlines = (PatchOnCommitFestInline,)
- list_display = ('name', )
+ list_display = ("name",)
class MailThreadAttachmentAdmin(admin.ModelAdmin):
- list_display = ('date', 'author', 'messageid', 'mailthread',)
+ list_display = (
+ "date",
+ "author",
+ "messageid",
+ "mailthread",
+ )
admin.site.register(Committer, CommitterAdmin)
@@ -27,6 +44,8 @@ class MailThreadAttachmentAdmin(admin.ModelAdmin):
admin.site.register(Patch, PatchAdmin)
admin.site.register(PatchHistory)
admin.site.register(TargetVersion)
+admin.site.register(CfbotBranch)
+admin.site.register(CfbotTask)
admin.site.register(MailThread)
admin.site.register(MailThreadAttachment, MailThreadAttachmentAdmin)
diff --git a/pgcommitfest/commitfest/ajax.py b/pgcommitfest/commitfest/ajax.py
index eaf7cdc8..329a83f9 100644
--- a/pgcommitfest/commitfest/ajax.py
+++ b/pgcommitfest/commitfest/ajax.py
@@ -1,18 +1,27 @@
-from django.shortcuts import get_object_or_404
-from django.http import HttpResponse, Http404
from django.conf import settings
-from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.db import transaction
+from django.http import Http404, HttpResponse
+from django.shortcuts import get_object_or_404
+from django.views.decorators.csrf import csrf_exempt
-import requests
import json
+import re
import textwrap
+import requests
+
from pgcommitfest.auth import user_search
-from .models import CommitFest, Patch, MailThread, MailThreadAttachment
-from .models import MailThreadAnnotation, PatchHistory
+
+from .models import (
+ CommitFest,
+ MailThread,
+ MailThreadAnnotation,
+ MailThreadAttachment,
+ Patch,
+ PatchHistory,
+)
class HttpResponseServiceUnavailable(HttpResponse):
@@ -23,16 +32,39 @@ class Http503(Exception):
pass
+def mockArchivesAPI(path):
+ with open(settings.MOCK_ARCHIVE_DATA, "r", encoding="utf-8") as file:
+ data = json.load(file)
+ for message in data:
+ message["atts"] = []
+
+ message_pattern = re.compile(r"^/message-id\.json/(?P<message_id>[^/]+)$")
+
+ message_match = message_pattern.match(path)
+ if message_match:
+ message_id = message_match.group("message_id")
+ return [message for message in data if message["msgid"] == message_id]
+ else:
+ return data
+
+
def _archivesAPI(suburl, params=None):
+ if getattr(settings, "MOCK_ARCHIVES", False) and getattr(
+ settings, "MOCK_ARCHIVE_DATA"
+ ):
+ return mockArchivesAPI(suburl)
+
try:
resp = requests.get(
- "http{0}://{1}:{2}{3}".format(settings.ARCHIVES_PORT == 443 and 's' or '',
- settings.ARCHIVES_SERVER,
- settings.ARCHIVES_PORT,
- suburl),
+ "http{0}://{1}:{2}{3}".format(
+ settings.ARCHIVES_PORT == 443 and "s" or "",
+ settings.ARCHIVES_SERVER,
+ settings.ARCHIVES_PORT,
+ suburl,
+ ),
params=params,
headers={
- 'Host': settings.ARCHIVES_HOST,
+ "Host": settings.ARCHIVES_HOST,
},
timeout=settings.ARCHIVES_TIMEOUT,
)
@@ -49,41 +81,43 @@ def _archivesAPI(suburl, params=None):
def getThreads(request):
- search = request.GET.get('s', None)
- if request.GET.get('a', '0') == '1':
+ search = request.GET.get("s", None)
+ if request.GET.get("a", "0") == "1":
attachonly = 1
else:
attachonly = 0
# Make a JSON api call to the archives server
- params = {'n': 100, 'a': attachonly}
+ params = {"n": 100, "a": attachonly}
if search:
- params['s'] = search
+ params["s"] = search
- r = _archivesAPI('/list/pgsql-hackers/latest.json', params)
- return sorted(r, key=lambda x: x['date'], reverse=True)
+ r = _archivesAPI("/list/pgsql-hackers/latest.json", params)
+ return sorted(r, key=lambda x: x["date"], reverse=True)
def getMessages(request):
- if 't' not in request.GET:
+ if "t" not in request.GET:
raise Http404("Missing parameter")
- threadid = request.GET['t']
+ threadid = request.GET["t"]
thread = MailThread.objects.get(pk=threadid)
# Always make a call over to the archives api
- r = _archivesAPI('/message-id.json/%s' % thread.messageid)
- return sorted(r, key=lambda x: x['date'], reverse=True)
+ r = _archivesAPI("/message-id.json/%s" % thread.messageid)
+ return sorted(r, key=lambda x: x["date"], reverse=True)
def refresh_single_thread(thread):
- r = sorted(_archivesAPI('/message-id.json/%s' % thread.messageid), key=lambda x: x['date'])
- if thread.latestmsgid != r[-1]['msgid']:
+ r = sorted(
+ _archivesAPI("/message-id.json/%s" % thread.messageid), key=lambda x: x["date"]
+ )
+ if thread.latestmsgid != r[-1]["msgid"]:
# There is now a newer mail in the thread!
- thread.latestmsgid = r[-1]['msgid']
- thread.latestmessage = r[-1]['date']
- thread.latestauthor = r[-1]['from']
- thread.latestsubject = r[-1]['subj']
+ thread.latestmsgid = r[-1]["msgid"]
+ thread.latestmessage = r[-1]["date"]
+ thread.latestauthor = r[-1]["from"]
+ thread.latestsubject = r[-1]["subj"]
thread.save()
parse_and_add_attachments(r, thread)
# Potentially update the last mail date - if there wasn't already a mail on each patch
@@ -95,142 +129,163 @@ def refresh_single_thread(thread):
@transaction.atomic
def annotateMessage(request):
- thread = get_object_or_404(MailThread, pk=int(request.POST['t']))
- msgid = request.POST['msgid']
- msg = request.POST['msg']
+ thread = get_object_or_404(MailThread, pk=int(request.POST["t"]))
+ msgid = request.POST["msgid"]
+ msg = request.POST["msg"]
# Get the subject, author and date from the archives
# We only have an API call to get the whole thread right now, so
# do that, and then find our entry in it.
- r = _archivesAPI('/message-id.json/%s' % thread.messageid)
+ r = _archivesAPI("/message-id.json/%s" % thread.messageid)
for m in r:
- if m['msgid'] == msgid:
- annotation = MailThreadAnnotation(mailthread=thread,
- user=request.user,
- msgid=msgid,
- annotationtext=msg,
- mailsubject=m['subj'],
- maildate=m['date'],
- mailauthor=m['from'])
+ if m["msgid"] == msgid:
+ annotation = MailThreadAnnotation(
+ mailthread=thread,
+ user=request.user,
+ msgid=msgid,
+ annotationtext=msg,
+ mailsubject=m["subj"],
+ maildate=m["date"],
+ mailauthor=m["from"],
+ )
annotation.save()
for p in thread.patches.all():
- PatchHistory(patch=p, by=request.user, what='Added annotation "%s" to %s' % (textwrap.shorten(msg, 100), msgid)).save_and_notify()
+ PatchHistory(
+ patch=p,
+ by=request.user,
+ what='Added annotation "%s" to %s'
+ % (textwrap.shorten(msg, 100), msgid),
+ ).save_and_notify()
p.set_modified()
p.save()
- return 'OK'
- return 'Message not found in thread!'
+ return "OK"
+ return "Message not found in thread!"
@transaction.atomic
def deleteAnnotation(request):
- annotation = get_object_or_404(MailThreadAnnotation, pk=request.POST['id'])
+ annotation = get_object_or_404(MailThreadAnnotation, pk=request.POST["id"])
for p in annotation.mailthread.patches.all():
- PatchHistory(patch=p, by=request.user, what='Deleted annotation "%s" from %s' % (annotation.annotationtext, annotation.msgid)).save_and_notify()
+ PatchHistory(
+ patch=p,
+ by=request.user,
+ what='Deleted annotation "%s" from %s'
+ % (annotation.annotationtext, annotation.msgid),
+ ).save_and_notify()
p.set_modified()
p.save()
annotation.delete()
- return 'OK'
+ return "OK"
def parse_and_add_attachments(threadinfo, mailthread):
for t in threadinfo:
- if len(t['atts']):
+ if len(t["atts"]):
# One or more attachments. For now, we're only actually going
# to store and process the first one, even though the API gets
# us all of them.
- MailThreadAttachment.objects.get_or_create(mailthread=mailthread,
- messageid=t['msgid'],
- defaults={
- 'date': t['date'],
- 'author': t['from'],
- 'attachmentid': t['atts'][0]['id'],
- 'filename': t['atts'][0]['name'],
- })
+ MailThreadAttachment.objects.get_or_create(
+ mailthread=mailthread,
+ messageid=t["msgid"],
+ defaults={
+ "date": t["date"],
+ "author": t["from"],
+ "attachmentid": t["atts"][0]["id"],
+ "filename": t["atts"][0]["name"],
+ },
+ )
# In theory we should remove objects if they don't have an
# attachment, but how could that ever happen? Ignore for now.
@transaction.atomic
def attachThread(request):
- cf = get_object_or_404(CommitFest, pk=int(request.POST['cf']))
- patch = get_object_or_404(Patch, pk=int(request.POST['p']), commitfests=cf)
- msgid = request.POST['msg']
+ cf = get_object_or_404(CommitFest, pk=int(request.POST["cf"]))
+ patch = get_object_or_404(Patch, pk=int(request.POST["p"]), commitfests=cf)
+ msgid = request.POST["msg"]
return doAttachThread(cf, patch, msgid, request.user)
def doAttachThread(cf, patch, msgid, user):
# Note! Must be called in an open transaction!
- r = sorted(_archivesAPI('/message-id.json/%s' % msgid), key=lambda x: x['date'])
+ r = sorted(_archivesAPI("/message-id.json/%s" % msgid), key=lambda x: x["date"])
# We have the full thread metadata - using the first and last entry,
# construct a new mailthread in our own model.
# First, though, check if it's already there.
- threads = MailThread.objects.filter(messageid=r[0]['msgid'])
+ threads = MailThread.objects.filter(messageid=r[0]["msgid"])
if len(threads):
thread = threads[0]
if thread.patches.filter(id=patch.id).exists():
- return 'This thread is already added to this email'
+ return "This thread is already added to this email"
# We did not exist, so we'd better add ourselves.
# While at it, we update the thread entry with the latest data from the
# archives.
thread.patches.add(patch)
- thread.latestmessage = r[-1]['date']
- thread.latestauthor = r[-1]['from']
- thread.latestsubject = r[-1]['subj']
- thread.latestmsgid = r[-1]['msgid']
+ thread.latestmessage = r[-1]["date"]
+ thread.latestauthor = r[-1]["from"]
+ thread.latestsubject = r[-1]["subj"]
+ thread.latestmsgid = r[-1]["msgid"]
thread.save()
else:
# No existing thread existed, so create it
# Now create a new mailthread entry
- m = MailThread(messageid=r[0]['msgid'],
- subject=r[0]['subj'],
- firstmessage=r[0]['date'],
- firstauthor=r[0]['from'],
- latestmessage=r[-1]['date'],
- latestauthor=r[-1]['from'],
- latestsubject=r[-1]['subj'],
- latestmsgid=r[-1]['msgid'],
- )
+ m = MailThread(
+ messageid=r[0]["msgid"],
+ subject=r[0]["subj"],
+ firstmessage=r[0]["date"],
+ firstauthor=r[0]["from"],
+ latestmessage=r[-1]["date"],
+ latestauthor=r[-1]["from"],
+ latestsubject=r[-1]["subj"],
+ latestmsgid=r[-1]["msgid"],
+ )
m.save()
m.patches.add(patch)
m.save()
parse_and_add_attachments(r, m)
- PatchHistory(patch=patch, by=user, what='Attached mail thread %s' % r[0]['msgid']).save_and_notify()
+ PatchHistory(
+ patch=patch, by=user, what="Attached mail thread %s" % r[0]["msgid"]
+ ).save_and_notify()
patch.update_lastmail()
patch.set_modified()
patch.save()
- return 'OK'
+ return "OK"
@transaction.atomic
def detachThread(request):
- cf = get_object_or_404(CommitFest, pk=int(request.POST['cf']))
- patch = get_object_or_404(Patch, pk=int(request.POST['p']), commitfests=cf)
- thread = get_object_or_404(MailThread, messageid=request.POST['msg'])
+ cf = get_object_or_404(CommitFest, pk=int(request.POST["cf"]))
+ patch = get_object_or_404(Patch, pk=int(request.POST["p"]), commitfests=cf)
+ thread = get_object_or_404(MailThread, messageid=request.POST["msg"])
patch.mailthread_set.remove(thread)
- PatchHistory(patch=patch, by=request.user, what='Detached mail thread %s' % request.POST['msg']).save_and_notify()
+ PatchHistory(
+ patch=patch,
+ by=request.user,
+ what="Detached mail thread %s" % request.POST["msg"],
+ ).save_and_notify()
patch.update_lastmail()
patch.set_modified()
patch.save()
- return 'OK'
+ return "OK"
def searchUsers(request):
if not request.user.is_staff:
return []
- if request.GET.get('s', ''):
- return user_search(request.GET['s'])
+ if request.GET.get("s", ""):
+ return user_search(request.GET["s"])
else:
return []
@@ -239,35 +294,36 @@ def importUser(request):
if not request.user.is_staff:
raise Http404()
- if request.GET.get('u', ''):
- u = user_search(userid=request.GET['u'])
+ if request.GET.get("u", ""):
+ u = user_search(userid=request.GET["u"])
if len(u) != 1:
return "Internal error, duplicate user found"
u = u[0]
- if User.objects.filter(username=u['u']).exists():
+ if User.objects.filter(username=u["u"]).exists():
return "User already exists"
- User(username=u['u'],
- first_name=u['f'],
- last_name=u['l'],
- email=u['e'],
- password='setbypluginnotsha1',
- ).save()
- return 'OK'
+ User(
+ username=u["u"],
+ first_name=u["f"],
+ last_name=u["l"],
+ email=u["e"],
+ password="setbypluginnotsha1",
+ ).save()
+ return "OK"
else:
raise Http404()
_ajax_map = {
- 'getThreads': getThreads,
- 'getMessages': getMessages,
- 'attachThread': attachThread,
- 'detachThread': detachThread,
- 'annotateMessage': annotateMessage,
- 'deleteAnnotation': deleteAnnotation,
- 'searchUsers': searchUsers,
- 'importUser': importUser,
+ "getThreads": getThreads,
+ "getMessages": getMessages,
+ "attachThread": attachThread,
+ "detachThread": detachThread,
+ "annotateMessage": annotateMessage,
+ "deleteAnnotation": deleteAnnotation,
+ "searchUsers": searchUsers,
+ "importUser": importUser,
}
@@ -278,8 +334,8 @@ def main(request, command):
if command not in _ajax_map:
raise Http404
try:
- resp = HttpResponse(content_type='application/json')
+ resp = HttpResponse(content_type="application/json")
json.dump(_ajax_map[command](request), resp)
return resp
except Http503 as e:
- return HttpResponseServiceUnavailable(e, content_type='text/plain')
+ return HttpResponseServiceUnavailable(e, content_type="text/plain")
diff --git a/pgcommitfest/commitfest/apps.py b/pgcommitfest/commitfest/apps.py
index e47efed8..7dbe4cb2 100644
--- a/pgcommitfest/commitfest/apps.py
+++ b/pgcommitfest/commitfest/apps.py
@@ -2,7 +2,7 @@
class CFAppConfig(AppConfig):
- name = 'pgcommitfest.commitfest'
+ name = "pgcommitfest.commitfest"
def ready(self):
from pgcommitfest.auth import auth_user_data_received
diff --git a/pgcommitfest/commitfest/feeds.py b/pgcommitfest/commitfest/feeds.py
index aa950fb3..9aff9025 100644
--- a/pgcommitfest/commitfest/feeds.py
+++ b/pgcommitfest/commitfest/feeds.py
@@ -2,15 +2,17 @@
class ActivityFeed(Feed):
- title = description = 'Commitfest Activity Log'
- link = 'https://commitfest.postgresql.org/'
+ title = description = "Commitfest Activity Log"
+ link = "https://commitfest.postgresql.org/"
def __init__(self, activity, cf, *args, **kwargs):
super(ActivityFeed, self).__init__(*args, **kwargs)
self.activity = activity
if cf:
self.cfid = cf.id
- self.title = self.description = 'PostgreSQL Commitfest {0} Activity Log'.format(cf.name)
+ self.title = self.description = (
+ "PostgreSQL Commitfest {0} Activity Log".format(cf.name)
+ )
else:
self.cfid = None
@@ -18,16 +20,22 @@ def items(self):
return self.activity
def item_title(self, item):
- return item['name']
+ return item["name"]
def item_description(self, item):
- return "Patch: {name}<br/>User: {by}<br/>\n{what}<br/>".format(**item)
+ return (
+ "Patch: {name}<br/>User: {by}<br/>\n{what}<br/>".format(
+ **item
+ )
+ )
def item_link(self, item):
if self.cfid:
- return 'https://commitfest.postgresql.org/{0}/{1}/'.format(self.cfid, item['patchid'])
+ return "https://commitfest.postgresql.org/{0}/{1}/".format(
+ self.cfid, item["patchid"]
+ )
else:
- return 'https://commitfest.postgresql.org/{cfid}/{patchid}/'.format(**item)
+ return "https://commitfest.postgresql.org/{cfid}/{patchid}/".format(**item)
def item_pubdate(self, item):
- return item['date']
+ return item["date"]
diff --git a/pgcommitfest/commitfest/fixtures/archive_data.json b/pgcommitfest/commitfest/fixtures/archive_data.json
new file mode 100644
index 00000000..680ea086
--- /dev/null
+++ b/pgcommitfest/commitfest/fixtures/archive_data.json
@@ -0,0 +1,602 @@
+[
+ {
+ "msgid": "example@message-0",
+ "date": "2025-01-20T14:20:10",
+ "from": "test@test.com",
+ "subj": "Re: Sample rate added to pg_stat_statements"
+ },
+ {
+ "msgid": "example@message-1",
+ "date": "2025-01-20T14:01:53",
+ "from": "test@test.com",
+ "subj": "Re: [PATCH] Add get_bytes() and set_bytes() functions"
+ },
+ {
+ "msgid": "example@message-2",
+ "date": "2025-01-20T13:49:45",
+ "from": "test@test.com",
+ "subj": "pg_stat_statements: improve loading and saving routines for the dump\n file"
+ },
+ {
+ "msgid": "example@message-3",
+ "date": "2025-01-20T13:26:55",
+ "from": "test@test.com",
+ "subj": "Re: per backend I/O statistics"
+ },
+ {
+ "msgid": "example@message-4",
+ "date": "2025-01-20T12:44:40",
+ "from": "test@test.com",
+ "subj": "Re: create subscription with (origin = none, copy_data = on)"
+ },
+ {
+ "msgid": "example@message-5",
+ "date": "2025-01-20T11:10:40",
+ "from": "test@test.com",
+ "subj": "Re: per backend I/O statistics"
+ },
+ {
+ "msgid": "example@message-6",
+ "date": "2025-01-20T08:21:35",
+ "from": "test@test.com",
+ "subj": "Re: Statistics Import and Export"
+ },
+ {
+ "msgid": "example@message-7",
+ "date": "2025-01-20T08:03:54",
+ "from": "test@test.com",
+ "subj": "Re: Introduce XID age and inactive timeout based replication slot invalidation"
+ },
+ {
+ "msgid": "example@message-8",
+ "date": "2025-01-20T06:53:39",
+ "from": "test@test.com",
+ "subj": "RE: Conflict detection for update_deleted in logical replication"
+ },
+ {
+ "msgid": "example@message-9",
+ "date": "2025-01-20T06:49:41",
+ "from": "test@test.com",
+ "subj": "Re: Adding a '--two-phase' option to 'pg_createsubscriber' utility."
+ },
+ {
+ "msgid": "example@message-10",
+ "date": "2025-01-20T06:34:41",
+ "from": "test@test.com",
+ "subj": "Re: per backend I/O statistics"
+ },
+ {
+ "msgid": "example@message-11",
+ "date": "2025-01-20T05:56:21",
+ "from": "test@test.com",
+ "subj": "Re: [PATCH] immediately kill psql process if server is not running."
+ },
+ {
+ "msgid": "example@message-12",
+ "date": "2025-01-20T05:33:23",
+ "from": "test@test.com",
+ "subj": "Re: connection establishment versus parallel workers"
+ },
+ {
+ "msgid": "example@message-13",
+ "date": "2025-01-20T05:32:07",
+ "from": "test@test.com",
+ "subj": "Re: Pgoutput not capturing the generated columns"
+ },
+ {
+ "msgid": "example@message-14",
+ "date": "2025-01-20T04:10:41",
+ "from": "test@test.com",
+ "subj": "Re: Pgoutput not capturing the generated columns"
+ },
+ {
+ "msgid": "example@message-15",
+ "date": "2025-01-20T04:01:27",
+ "from": "test@test.com",
+ "subj": "int64 support in List API"
+ },
+ {
+ "msgid": "example@message-16",
+ "date": "2025-01-19T23:55:17",
+ "from": "test@test.com",
+ "subj": "Re: Add RESPECT/IGNORE NULLS and FROM FIRST/LAST options"
+ },
+ {
+ "msgid": "example@message-17",
+ "date": "2025-01-19T23:47:14",
+ "from": "test@test.com",
+ "subj": "Re: attndims, typndims still not enforced, but make the value within a sane threshold"
+ },
+ {
+ "msgid": "example@message-18",
+ "date": "2025-01-19T15:50:49",
+ "from": "test@test.com",
+ "subj": "Re: Parallel heap vacuum"
+ },
+ {
+ "msgid": "example@message-19",
+ "date": "2025-01-19T14:56:49",
+ "from": "test@test.com",
+ "subj": "Re: [RFC] Lock-free XLog Reservation from WAL"
+ },
+ {
+ "msgid": "example@message-20",
+ "date": "2025-01-19T12:16:49",
+ "from": "test@test.com",
+ "subj": "Re: Pgoutput not capturing the generated columns"
+ },
+ {
+ "msgid": "example@message-21",
+ "date": "2025-01-19T09:33:55",
+ "from": "test@test.com",
+ "subj": "Re: Add XMLNamespaces to XMLElement"
+ },
+ {
+ "msgid": "example@message-22",
+ "date": "2025-01-19T00:11:32",
+ "from": "test@test.com",
+ "subj": "Get rid of WALBufMappingLock"
+ },
+ {
+ "msgid": "example@message-23",
+ "date": "2025-01-18T23:42:50",
+ "from": "test@test.com",
+ "subj": "Re: improve DEBUG1 logging of parallel workers for CREATE INDEX?"
+ },
+ {
+ "msgid": "example@message-24",
+ "date": "2025-01-18T20:37:54",
+ "from": "test@test.com",
+ "subj": "Re: Adding comments to help understand psql hidden queries"
+ },
+ {
+ "msgid": "example@message-25",
+ "date": "2025-01-18T19:44:00",
+ "from": "test@test.com",
+ "subj": "Re: Coccinelle for PostgreSQL development [1/N]: coccicheck.py"
+ },
+ {
+ "msgid": "example@message-26",
+ "date": "2025-01-18T17:32:10",
+ "from": "test@test.com",
+ "subj": "Re: Replace current implementations in crypt() and gen_salt() to\n OpenSSL"
+ },
+ {
+ "msgid": "example@message-27",
+ "date": "2025-01-18T17:00:04",
+ "from": "test@test.com",
+ "subj": "Re: Statistics Import and Export"
+ },
+ {
+ "msgid": "example@message-28",
+ "date": "2025-01-18T16:51:08",
+ "from": "test@test.com",
+ "subj": "Re: Confine vacuum skip logic to lazy_scan_skip"
+ },
+ {
+ "msgid": "example@message-29",
+ "date": "2025-01-18T14:18:00",
+ "from": "test@test.com",
+ "subj": "Re: Revisiting {CREATE INDEX, REINDEX} CONCURRENTLY improvements"
+ },
+ {
+ "msgid": "example@message-30",
+ "date": "2025-01-18T12:59:35",
+ "from": "test@test.com",
+ "subj": "Re: Issues with ON CONFLICT UPDATE and REINDEX CONCURRENTLY"
+ },
+ {
+ "msgid": "example@message-31",
+ "date": "2025-01-18T07:14:02",
+ "from": "test@test.com",
+ "subj": "Re: Old BufferDesc refcount in PrintBufferDescs and PrintPinnedBufs"
+ },
+ {
+ "msgid": "example@message-32",
+ "date": "2025-01-18T06:42:15",
+ "from": "test@test.com",
+ "subj": "Re: Collation & ctype method table, and extension hooks"
+ },
+ {
+ "msgid": "example@message-33",
+ "date": "2025-01-18T05:01:27",
+ "from": "test@test.com",
+ "subj": "Re: create subscription with (origin = none, copy_data = on)"
+ },
+ {
+ "msgid": "example@message-34",
+ "date": "2025-01-18T03:45:13",
+ "from": "test@test.com",
+ "subj": "RE: Conflict detection for update_deleted in logical replication"
+ },
+ {
+ "msgid": "example@message-35",
+ "date": "2025-01-18T02:02:03",
+ "from": "test@test.com",
+ "subj": "Re: Old BufferDesc refcount in PrintBufferDescs and PrintPinnedBufs"
+ },
+ {
+ "msgid": "example@message-36",
+ "date": "2025-01-18T01:23:19",
+ "from": "test@test.com",
+ "subj": "rename es_epq_active to es_epqstate"
+ },
+ {
+ "msgid": "example@message-37",
+ "date": "2025-01-18T01:11:41",
+ "from": "test@test.com",
+ "subj": "Re: pg_trgm comparison bug on cross-architecture replication due to\n different char implementation"
+ },
+ {
+ "msgid": "example@message-38",
+ "date": "2025-01-18T00:34:43",
+ "from": "test@test.com",
+ "subj": "Re: Add CASEFOLD() function."
+ },
+ {
+ "msgid": "example@message-39",
+ "date": "2025-01-18T00:27:43",
+ "from": "test@test.com",
+ "subj": "Re: [PATCH] Add roman support for to_number function"
+ },
+ {
+ "msgid": "example@message-40",
+ "date": "2025-01-17T22:11:56",
+ "from": "test@test.com",
+ "subj": "Old BufferDesc refcount in PrintBufferDescs and PrintPinnedBufs"
+ },
+ {
+ "msgid": "example@message-41",
+ "date": "2025-01-17T20:44:01",
+ "from": "test@test.com",
+ "subj": "Re: Bug in detaching a partition with a foreign key."
+ },
+ {
+ "msgid": "example@message-42",
+ "date": "2025-01-17T19:02:15",
+ "from": "test@test.com",
+ "subj": "Re: [PoC] Federated Authn/z with OAUTHBEARER"
+ },
+ {
+ "msgid": "example@message-43",
+ "date": "2025-01-17T16:43:29",
+ "from": "test@test.com",
+ "subj": "Re: Add RESPECT/IGNORE NULLS and FROM FIRST/LAST options"
+ },
+ {
+ "msgid": "example@message-44",
+ "date": "2025-01-17T16:01:53",
+ "from": "test@test.com",
+ "subj": "Re: Accept recovery conflict interrupt on blocked writing"
+ },
+ {
+ "msgid": "example@message-45",
+ "date": "2025-01-17T15:45:46",
+ "from": "test@test.com",
+ "subj": "Re: Set AUTOCOMMIT to on in script output by pg_dump"
+ },
+ {
+ "msgid": "example@message-46",
+ "date": "2025-01-17T15:42:13",
+ "from": "test@test.com",
+ "subj": "Re: POC: track vacuum/analyze cumulative time per relation"
+ },
+ {
+ "msgid": "example@message-47",
+ "date": "2025-01-17T15:40:54",
+ "from": "test@test.com",
+ "subj": "Re: pure parsers and reentrant scanners"
+ },
+ {
+ "msgid": "example@message-48",
+ "date": "2025-01-17T14:20:12",
+ "from": "test@test.com",
+ "subj": "Re: Statistics Import and Export"
+ },
+ {
+ "msgid": "example@message-49",
+ "date": "2025-01-17T12:50:15",
+ "from": "test@test.com",
+ "subj": "Re: NOT ENFORCED constraint feature"
+ },
+ {
+ "msgid": "example@message-50",
+ "date": "2025-01-17T12:03:09",
+ "from": "test@test.com",
+ "subj": "Re: Bypassing cursors in postgres_fdw to enable parallel plans"
+ },
+ {
+ "msgid": "example@message-51",
+ "date": "2025-01-17T10:23:48",
+ "from": "test@test.com",
+ "subj": "Re: per backend I/O statistics"
+ },
+ {
+ "msgid": "example@message-52",
+ "date": "2025-01-17T09:29:50",
+ "from": "test@test.com",
+ "subj": "Re: Add “FOR UPDATE NOWAIT” lock details to the log."
+ },
+ {
+ "msgid": "example@message-53",
+ "date": "2025-01-17T08:30:04",
+ "from": "test@test.com",
+ "subj": "create subscription with (origin = none, copy_data = on)"
+ },
+ {
+ "msgid": "example@message-54",
+ "date": "2025-01-17T07:18:20",
+ "from": "test@test.com",
+ "subj": "Re: Re: proposal: schema variables"
+ },
+ {
+ "msgid": "example@message-55",
+ "date": "2025-01-17T07:15:34",
+ "from": "test@test.com",
+ "subj": "Re: SQLFunctionCache and generic plans"
+ },
+ {
+ "msgid": "example@message-56",
+ "date": "2025-01-17T05:05:41",
+ "from": "test@test.com",
+ "subj": "Re: Some ExecSeqScan optimizations"
+ },
+ {
+ "msgid": "example@message-57",
+ "date": "2025-01-17T05:00:49",
+ "from": "test@test.com",
+ "subj": "Remove XLogRecGetFullXid() in xlogreader.c?"
+ },
+ {
+ "msgid": "example@message-58",
+ "date": "2025-01-17T04:22:07",
+ "from": "test@test.com",
+ "subj": "Re: Adding a '--two-phase' option to 'pg_createsubscriber' utility."
+ },
+ {
+ "msgid": "example@message-59",
+ "date": "2025-01-17T03:18:45",
+ "from": "test@test.com",
+ "subj": "Automatic update of time column"
+ },
+ {
+ "msgid": "example@message-60",
+ "date": "2025-01-17T01:06:14",
+ "from": "test@test.com",
+ "subj": "Re: Parallel heap vacuum"
+ },
+ {
+ "msgid": "example@message-61",
+ "date": "2025-01-17T01:05:53",
+ "from": "test@test.com",
+ "subj": "Timeline issue if StartupXLOG() is interrupted right before\n end-of-recovery record is done"
+ },
+ {
+ "msgid": "example@message-62",
+ "date": "2025-01-16T22:50:14",
+ "from": "test@test.com",
+ "subj": "Re: Trigger more frequent autovacuums of heavy insert tables"
+ },
+ {
+ "msgid": "example@message-63",
+ "date": "2025-01-16T22:41:06",
+ "from": "test@test.com",
+ "subj": "Re: Document NULL"
+ },
+ {
+ "msgid": "example@message-64",
+ "date": "2025-01-16T21:43:49",
+ "from": "test@test.com",
+ "subj": "Re: Trigger more frequent autovacuums of heavy insert tables"
+ },
+ {
+ "msgid": "example@message-65",
+ "date": "2025-01-16T20:52:54",
+ "from": "test@test.com",
+ "subj": "Re: An improvement of ProcessTwoPhaseBuffer logic"
+ },
+ {
+ "msgid": "example@message-66",
+ "date": "2025-01-16T19:38:21",
+ "from": "test@test.com",
+ "subj": "Re: Document How Commit Handles Aborted Transactions"
+ },
+ {
+ "msgid": "example@message-67",
+ "date": "2025-01-16T18:42:32",
+ "from": "test@test.com",
+ "subj": "Re: Non-text mode for pg_dumpall"
+ },
+ {
+ "msgid": "example@message-68",
+ "date": "2025-01-16T15:59:31",
+ "from": "test@test.com",
+ "subj": "Re: per backend WAL statistics"
+ },
+ {
+ "msgid": "example@message-69",
+ "date": "2025-01-16T14:14:25",
+ "from": "test@test.com",
+ "subj": "Re: [PATCH] Add sortsupport for range types and btree_gist"
+ },
+ {
+ "msgid": "example@message-70",
+ "date": "2025-01-16T13:53:31",
+ "from": "test@test.com",
+ "subj": "Bug in detaching a partition with a foreign key."
+ },
+ {
+ "msgid": "example@message-71",
+ "date": "2025-01-16T13:52:46",
+ "from": "test@test.com",
+ "subj": "Increase NUM_XLOGINSERT_LOCKS"
+ },
+ {
+ "msgid": "example@message-72",
+ "date": "2025-01-16T13:32:09",
+ "from": "test@test.com",
+ "subj": "Re: POC: make mxidoff 64 bits"
+ },
+ {
+ "msgid": "example@message-73",
+ "date": "2025-01-16T13:24:41",
+ "from": "test@test.com",
+ "subj": "Re: Accept recovery conflict interrupt on blocked writing"
+ },
+ {
+ "msgid": "example@message-74",
+ "date": "2025-01-16T11:16:06",
+ "from": "test@test.com",
+ "subj": "Re: Adding a '--two-phase' option to 'pg_createsubscriber' utility."
+ },
+ {
+ "msgid": "example@message-75",
+ "date": "2025-01-16T10:54:53",
+ "from": "test@test.com",
+ "subj": "Re: Change GUC hashtable to use simplehash?"
+ },
+ {
+ "msgid": "example@message-76",
+ "date": "2025-01-16T10:54:22",
+ "from": "test@test.com",
+ "subj": "Re: Psql meta-command conninfo+"
+ },
+ {
+ "msgid": "example@message-77",
+ "date": "2025-01-16T08:47:08",
+ "from": "test@test.com",
+ "subj": "Re: Pgoutput not capturing the generated columns"
+ },
+ {
+ "msgid": "example@message-78",
+ "date": "2025-01-16T08:44:18",
+ "from": "test@test.com",
+ "subj": "Re: Non-text mode for pg_dumpall"
+ },
+ {
+ "msgid": "example@message-79",
+ "date": "2025-01-16T08:40:51",
+ "from": "test@test.com",
+ "subj": "Re: Show WAL write and fsync stats in pg_stat_io"
+ },
+ {
+ "msgid": "example@message-80",
+ "date": "2025-01-16T07:50:09",
+ "from": "test@test.com",
+ "subj": "Re: An improvement of ProcessTwoPhaseBuffer logic"
+ },
+ {
+ "msgid": "example@message-81",
+ "date": "2025-01-16T07:21:13",
+ "from": "test@test.com",
+ "subj": "Re: XMLDocument (SQL/XML X030)"
+ },
+ {
+ "msgid": "example@message-82",
+ "date": "2025-01-16T07:05:16",
+ "from": "test@test.com",
+ "subj": "Re: Introduce XID age and inactive timeout based replication slot invalidation"
+ },
+ {
+ "msgid": "example@message-83",
+ "date": "2025-01-16T07:04:23",
+ "from": "test@test.com",
+ "subj": "Re: Log a warning in pg_createsubscriber for max_slot_wal_keep_size"
+ },
+ {
+ "msgid": "example@message-84",
+ "date": "2025-01-16T05:38:19",
+ "from": "test@test.com",
+ "subj": "Re: TOAST versus toast"
+ },
+ {
+ "msgid": "example@message-85",
+ "date": "2025-01-16T05:17:39",
+ "from": "test@test.com",
+ "subj": "Re: Log a warning in pg_createsubscriber for max_slot_wal_keep_size"
+ },
+ {
+ "msgid": "example@message-86",
+ "date": "2025-01-16T05:13:08",
+ "from": "test@test.com",
+ "subj": "Re: Make pg_stat_io view count IOs as bytes instead of blocks"
+ },
+ {
+ "msgid": "example@message-87",
+ "date": "2025-01-16T04:14:31",
+ "from": "test@test.com",
+ "subj": "Re: Make pg_stat_io view count IOs as bytes instead of blocks"
+ },
+ {
+ "msgid": "example@message-88",
+ "date": "2025-01-16T03:57:49",
+ "from": "test@test.com",
+ "subj": "TOAST versus toast"
+ },
+ {
+ "msgid": "example@message-89",
+ "date": "2025-01-16T02:19:49",
+ "from": "test@test.com",
+ "subj": "Limit length of queryies in pg_stat_statement extension"
+ },
+ {
+ "msgid": "example@message-90",
+ "date": "2025-01-16T01:45:15",
+ "from": "test@test.com",
+ "subj": "Re: Confine vacuum skip logic to lazy_scan_skip"
+ },
+ {
+ "msgid": "example@message-91",
+ "date": "2025-01-16T01:15:31",
+ "from": "test@test.com",
+ "subj": "Re: Change GUC hashtable to use simplehash?"
+ },
+ {
+ "msgid": "example@message-92",
+ "date": "2025-01-16T01:12:51",
+ "from": "test@test.com",
+ "subj": "Fix misuse use of pg_b64_encode function (contrib/postgres_fdw/connection.c)"
+ },
+ {
+ "msgid": "example@message-93",
+ "date": "2025-01-16T01:00:51",
+ "from": "test@test.com",
+ "subj": "Re: An improvement of ProcessTwoPhaseBuffer logic"
+ },
+ {
+ "msgid": "example@message-94",
+ "date": "2025-01-16T00:42:49",
+ "from": "test@test.com",
+ "subj": "Re: Infinite loop in XLogPageRead() on standby"
+ },
+ {
+ "msgid": "example@message-95",
+ "date": "2025-01-15T23:47:51",
+ "from": "test@test.com",
+ "subj": "Re: convert libpgport's pqsignal() to a void function"
+ },
+ {
+ "msgid": "example@message-96",
+ "date": "2025-01-15T22:20:58",
+ "from": "test@test.com",
+ "subj": "Re: Use Python \"Limited API\" in PL/Python"
+ },
+ {
+ "msgid": "example@message-97",
+ "date": "2025-01-15T20:56:04",
+ "from": "test@test.com",
+ "subj": "Re: Statistics Import and Export"
+ },
+ {
+ "msgid": "example@message-98",
+ "date": "2025-01-15T20:55:52",
+ "from": "test@test.com",
+ "subj": "Re: Eagerly scan all-visible pages to amortize aggressive vacuum"
+ },
+ {
+ "msgid": "example@message-99",
+ "date": "2025-01-15T20:35:41",
+ "from": "test@test.com",
+ "subj": "Re: Add XMLNamespaces to XMLElement"
+ }
+]
diff --git a/pgcommitfest/commitfest/fixtures/auth_data.json b/pgcommitfest/commitfest/fixtures/auth_data.json
new file mode 100644
index 00000000..bfaf3bfb
--- /dev/null
+++ b/pgcommitfest/commitfest/fixtures/auth_data.json
@@ -0,0 +1,20 @@
+[
+{
+ "model": "auth.user",
+ "pk": 1,
+ "fields": {
+ "password": "pbkdf2_sha256$600000$49rgHaLmmFQUm7c663LCrU$i68PFeI493lPmgNx/RHnWNuw4ZRzzvJWNqU4os5VnF4=",
+ "last_login": "2025-01-26T10:43:07.735",
+ "is_superuser": true,
+ "username": "admin",
+ "first_name": "",
+ "last_name": "",
+ "email": "test@test.com",
+ "is_staff": true,
+ "is_active": true,
+ "date_joined": "2025-01-20T15:47:04.132",
+ "groups": [],
+ "user_permissions": []
+ }
+}
+]
diff --git a/pgcommitfest/commitfest/fixtures/commitfest_data.json b/pgcommitfest/commitfest/fixtures/commitfest_data.json
new file mode 100644
index 00000000..7bd54001
--- /dev/null
+++ b/pgcommitfest/commitfest/fixtures/commitfest_data.json
@@ -0,0 +1,619 @@
+[
+{
+ "model": "commitfest.commitfest",
+ "pk": 1,
+ "fields": {
+ "name": "Sample Old Commitfest",
+ "status": 4,
+ "startdate": "2024-05-01",
+ "enddate": "2024-05-31"
+ }
+},
+{
+ "model": "commitfest.commitfest",
+ "pk": 2,
+ "fields": {
+ "name": "Sample In Progress Commitfest",
+ "status": 3,
+ "startdate": "2025-01-01",
+ "enddate": "2025-02-28"
+ }
+},
+{
+ "model": "commitfest.commitfest",
+ "pk": 3,
+ "fields": {
+ "name": "Sample Open Commitfest",
+ "status": 2,
+ "startdate": "2025-03-01",
+ "enddate": "2025-03-31"
+ }
+},
+{
+ "model": "commitfest.commitfest",
+ "pk": 4,
+ "fields": {
+ "name": "Sample Future Commitfest",
+ "status": 1,
+ "startdate": "2025-05-01",
+ "enddate": "2025-05-31"
+ }
+},
+{
+ "model": "commitfest.topic",
+ "pk": 1,
+ "fields": {
+ "topic": "Bugs"
+ }
+},
+{
+ "model": "commitfest.topic",
+ "pk": 2,
+ "fields": {
+ "topic": "Performance"
+ }
+},
+{
+ "model": "commitfest.topic",
+ "pk": 3,
+ "fields": {
+ "topic": "Miscellaneous"
+ }
+},
+{
+ "model": "commitfest.targetversion",
+ "pk": 1,
+ "fields": {
+ "version": "18"
+ }
+},
+{
+ "model": "commitfest.patch",
+ "pk": 1,
+ "fields": {
+ "name": "Conflict detection for update_deleted in logical replication",
+ "topic": 1,
+ "wikilink": "",
+ "gitlink": "",
+ "targetversion": null,
+ "committer": null,
+ "created": "2025-01-26T10:48:31.579",
+ "modified": "2025-01-26T10:53:20.498",
+ "lastmail": "2025-01-20T06:53:39",
+ "authors": [
+ 1
+ ],
+ "reviewers": [],
+ "subscribers": [],
+ "mailthread_set": [
+ 1
+ ]
+ }
+},
+{
+ "model": "commitfest.patch",
+ "pk": 2,
+ "fields": {
+ "name": "Sample rate added to pg_stat_statements",
+ "topic": 3,
+ "wikilink": "",
+ "gitlink": "",
+ "targetversion": null,
+ "committer": null,
+ "created": "2025-01-26T10:51:17.305",
+ "modified": "2025-01-26T10:51:19.631",
+ "lastmail": "2025-01-20T14:20:10",
+ "authors": [],
+ "reviewers": [],
+ "subscribers": [],
+ "mailthread_set": [
+ 2
+ ]
+ }
+},
+{
+ "model": "commitfest.patch",
+ "pk": 3,
+ "fields": {
+ "name": "Per Backend I/O statistics",
+ "topic": 3,
+ "wikilink": "",
+ "gitlink": "",
+ "targetversion": null,
+ "committer": null,
+ "created": "2025-01-26T11:02:07.467",
+ "modified": "2025-01-26T11:02:10.911",
+ "lastmail": "2025-01-20T13:26:55",
+ "authors": [],
+ "reviewers": [],
+ "subscribers": [],
+ "mailthread_set": [
+ 3
+ ]
+ }
+},
+{
+ "model": "commitfest.patch",
+ "pk": 4,
+ "fields": {
+ "name": "create subscription with (origin = none, copy_data = none)",
+ "topic": 3,
+ "wikilink": "",
+ "gitlink": "",
+ "targetversion": null,
+ "committer": null,
+ "created": "2025-01-31T13:30:19.744",
+ "modified": "2025-01-31T13:30:21.305",
+ "lastmail": "2025-01-20T12:44:40",
+ "authors": [],
+ "reviewers": [],
+ "subscribers": [],
+ "mailthread_set": [
+ 4
+ ]
+ }
+},
+{
+ "model": "commitfest.patchoncommitfest",
+ "pk": 1,
+ "fields": {
+ "patch": 1,
+ "commitfest": 2,
+ "enterdate": "2025-01-26T10:48:31.579",
+ "leavedate": null,
+ "status": 3
+ }
+},
+{
+ "model": "commitfest.patchoncommitfest",
+ "pk": 2,
+ "fields": {
+ "patch": 2,
+ "commitfest": 2,
+ "enterdate": "2025-01-26T10:51:17.305",
+ "leavedate": null,
+ "status": 1
+ }
+},
+{
+ "model": "commitfest.patchoncommitfest",
+ "pk": 3,
+ "fields": {
+ "patch": 1,
+ "commitfest": 1,
+ "enterdate": "2024-04-01T10:52:24",
+ "leavedate": "2024-06-05T10:52:34",
+ "status": 5
+ }
+},
+{
+ "model": "commitfest.patchoncommitfest",
+ "pk": 4,
+ "fields": {
+ "patch": 3,
+ "commitfest": 3,
+ "enterdate": "2025-01-26T11:02:07.467",
+ "leavedate": null,
+ "status": 1
+ }
+},
+{
+ "model": "commitfest.patchoncommitfest",
+ "pk": 5,
+ "fields": {
+ "patch": 4,
+ "commitfest": 2,
+ "enterdate": "2025-01-31T13:30:19.745",
+ "leavedate": null,
+ "status": 1
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 1,
+ "fields": {
+ "patch": 1,
+ "date": "2025-01-26T10:48:31.580",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Created patch record"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 2,
+ "fields": {
+ "patch": 1,
+ "date": "2025-01-26T10:48:31.582",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Attached mail thread example@message-8"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 3,
+ "fields": {
+ "patch": 1,
+ "date": "2025-01-26T10:48:54.115",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Changed authors to (admin)"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 4,
+ "fields": {
+ "patch": 2,
+ "date": "2025-01-26T10:51:17.306",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Created patch record"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 5,
+ "fields": {
+ "patch": 2,
+ "date": "2025-01-26T10:51:17.307",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Attached mail thread example@message-0"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 6,
+ "fields": {
+ "patch": 1,
+ "date": "2025-01-26T10:53:20.498",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "New status: Ready for Committer"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 7,
+ "fields": {
+ "patch": 3,
+ "date": "2025-01-26T11:02:07.468",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Created patch record"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 8,
+ "fields": {
+ "patch": 3,
+ "date": "2025-01-26T11:02:07.469",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Attached mail thread example@message-3"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 9,
+ "fields": {
+ "patch": 4,
+ "date": "2025-01-31T13:30:19.745",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Created patch record"
+ }
+},
+{
+ "model": "commitfest.patchhistory",
+ "pk": 10,
+ "fields": {
+ "patch": 4,
+ "date": "2025-01-31T13:30:19.748",
+ "by": 1,
+ "by_cfbot": false,
+ "what": "Attached mail thread example@message-4"
+ }
+},
+{
+ "model": "commitfest.mailthread",
+ "pk": 1,
+ "fields": {
+ "messageid": "example@message-8",
+ "subject": "RE: Conflict detection for update_deleted in logical replication",
+ "firstmessage": "2025-01-20T06:53:39",
+ "firstauthor": "test@test.com",
+ "latestmessage": "2025-01-20T06:53:39",
+ "latestauthor": "test@test.com",
+ "latestsubject": "RE: Conflict detection for update_deleted in logical replication",
+ "latestmsgid": "example@message-8"
+ }
+},
+{
+ "model": "commitfest.mailthread",
+ "pk": 2,
+ "fields": {
+ "messageid": "example@message-0",
+ "subject": "Re: Sample rate added to pg_stat_statements",
+ "firstmessage": "2025-01-20T14:20:10",
+ "firstauthor": "test@test.com",
+ "latestmessage": "2025-01-20T14:20:10",
+ "latestauthor": "test@test.com",
+ "latestsubject": "Re: Sample rate added to pg_stat_statements",
+ "latestmsgid": "example@message-0"
+ }
+},
+{
+ "model": "commitfest.mailthread",
+ "pk": 3,
+ "fields": {
+ "messageid": "example@message-3",
+ "subject": "Re: per backend I/O statistics",
+ "firstmessage": "2025-01-20T13:26:55",
+ "firstauthor": "test@test.com",
+ "latestmessage": "2025-01-20T13:26:55",
+ "latestauthor": "test@test.com",
+ "latestsubject": "Re: per backend I/O statistics",
+ "latestmsgid": "example@message-3"
+ }
+},
+{
+ "model": "commitfest.mailthread",
+ "pk": 4,
+ "fields": {
+ "messageid": "example@message-4",
+ "subject": "Re: create subscription with (origin = none, copy_data = on)",
+ "firstmessage": "2025-01-20T12:44:40",
+ "firstauthor": "test@test.com",
+ "latestmessage": "2025-01-20T12:44:40",
+ "latestauthor": "test@test.com",
+ "latestsubject": "Re: create subscription with (origin = none, copy_data = on)",
+ "latestmsgid": "example@message-4"
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 1,
+ "fields": {
+ "statusstring": "Needs review",
+ "sortkey": 10
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 2,
+ "fields": {
+ "statusstring": "Waiting on Author",
+ "sortkey": 15
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 3,
+ "fields": {
+ "statusstring": "Ready for Committer",
+ "sortkey": 20
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 4,
+ "fields": {
+ "statusstring": "Committed",
+ "sortkey": 25
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 5,
+ "fields": {
+ "statusstring": "Moved to next CF",
+ "sortkey": 30
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 6,
+ "fields": {
+ "statusstring": "Rejected",
+ "sortkey": 50
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 7,
+ "fields": {
+ "statusstring": "Returned with Feedback",
+ "sortkey": 50
+ }
+},
+{
+ "model": "commitfest.patchstatus",
+ "pk": 8,
+ "fields": {
+ "statusstring": "Withdrawn",
+ "sortkey": 50
+ }
+},
+{
+ "model": "commitfest.cfbotbranch",
+ "pk": 1,
+ "fields": {
+ "branch_id": 123,
+ "branch_name": "cf/1",
+ "commit_id": "abcdefg",
+ "apply_url": "http://cfbot.cputube.org/patch_4573.log",
+ "status": "finished",
+ "needs_rebase_since": null,
+ "created": "2025-01-26T22:06:02.980",
+ "modified": "2025-01-29T22:50:37.805",
+ "version": "",
+ "patch_count": 5,
+ "first_additions": 10,
+ "first_deletions": 5,
+ "all_additions": 834,
+ "all_deletions": 128
+ }
+},
+{
+ "model": "commitfest.cfbotbranch",
+ "pk": 2,
+ "fields": {
+ "branch_id": 345,
+ "branch_name": "cf/2",
+ "commit_id": null,
+ "apply_url": "http://cfbot.cputube.org/patch_4573.log",
+ "status": "failed",
+ "needs_rebase_since": null,
+ "created": "2025-01-26T22:11:09.961",
+ "modified": "2025-01-26T22:20:39.372",
+ "version": null,
+ "patch_count": null,
+ "first_additions": null,
+ "first_deletions": null,
+ "all_additions": null,
+ "all_deletions": null
+ }
+},
+{
+ "model": "commitfest.cfbotbranch",
+ "pk": 3,
+ "fields": {
+ "branch_id": 567,
+ "branch_name": "cf/3",
+ "commit_id": "abc123",
+ "apply_url": "http://cfbot.cputube.org/patch_4748.log",
+ "status": "failed",
+ "needs_rebase_since": null,
+ "created": "2025-01-26T22:22:46.602",
+ "modified": "2025-01-29T22:58:51.032",
+ "version": "",
+ "patch_count": 3,
+ "first_additions": 345,
+ "first_deletions": 158,
+ "all_additions": 8764,
+ "all_deletions": 345
+ }
+},
+{
+ "model": "commitfest.cfbotbranch",
+ "pk": 4,
+ "fields": {
+ "branch_id": 76,
+ "branch_name": "cf/4",
+ "commit_id": "abcggg",
+ "apply_url": "http://cfbot.cputube.org/patch_4748.log",
+ "status": "testing",
+ "needs_rebase_since": null,
+ "created": "2025-01-31T13:32:22.017",
+ "modified": "2025-01-31T13:32:22.017",
+ "version": "",
+ "patch_count": 1,
+ "first_additions": 123,
+ "first_deletions": 14,
+ "all_additions": 123,
+ "all_deletions": 14
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 1,
+ "fields": {
+ "task_id": "12345",
+ "task_name": "Linux build",
+ "patch": 1,
+ "branch_id": 123,
+ "position": 1,
+ "status": "COMPLETED",
+ "created": "2025-01-26T22:06:49.237",
+ "modified": "2025-01-26T22:07:40.405"
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 2,
+ "fields": {
+ "task_id": "12346",
+ "task_name": "MacOS Build",
+ "patch": 1,
+ "branch_id": 123,
+ "position": 2,
+ "status": "COMPLETED",
+ "created": "2025-01-26T22:07:32.041",
+ "modified": "2025-01-26T22:07:32.041"
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 3,
+ "fields": {
+ "task_id": "4561",
+ "task_name": "Redhat",
+ "patch": 3,
+ "branch_id": 567,
+ "position": 1,
+ "status": "COMPLETED",
+ "created": "2025-01-26T22:24:37.445",
+ "modified": "2025-01-26T22:24:37.445"
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 4,
+ "fields": {
+ "task_id": "4562",
+ "task_name": "MacOS build",
+ "patch": 3,
+ "branch_id": 567,
+ "position": 2,
+ "status": "EXECUTING",
+ "created": "2025-01-26T22:25:15.283",
+ "modified": "2025-01-26T22:27:09.055"
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 5,
+ "fields": {
+ "task_id": "4563",
+ "task_name": "FreeBSD",
+ "patch": 3,
+ "branch_id": 567,
+ "position": 3,
+ "status": "FAILED",
+ "created": "2025-01-26T22:25:48.021",
+ "modified": "2025-01-26T22:25:48.021"
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 6,
+ "fields": {
+ "task_id": "4564",
+ "task_name": "NetBSD",
+ "patch": 3,
+ "branch_id": 567,
+ "position": 4,
+ "status": "CREATED",
+ "created": "2025-01-26T22:29:09.156",
+ "modified": "2025-01-26T22:29:09.156"
+ }
+},
+{
+ "model": "commitfest.cfbottask",
+ "pk": 7,
+ "fields": {
+ "task_id": "4565",
+ "task_name": "Linux Valgrind",
+ "patch": 3,
+ "branch_id": 567,
+ "position": 5,
+ "status": "SCHEDULED",
+ "created": "2025-01-26T22:30:03.199",
+ "modified": "2025-01-26T22:30:03.199"
+ }
+}
+]
diff --git a/pgcommitfest/commitfest/forms.py b/pgcommitfest/commitfest/forms.py
index 61d90463..1353fc2d 100644
--- a/pgcommitfest/commitfest/forms.py
+++ b/pgcommitfest/commitfest/forms.py
@@ -1,13 +1,13 @@
from django import forms
+from django.contrib.auth.models import User
+from django.db.models import Q
from django.forms import ValidationError
from django.forms.widgets import HiddenInput
-from django.db.models import Q
-from django.contrib.auth.models import User
from django.http import Http404
-from .models import Patch, MailThread, PatchOnCommitFest, TargetVersion
-from .widgets import ThreadPickWidget
from .ajax import _archivesAPI
+from .models import MailThread, Patch, PatchOnCommitFest, TargetVersion
+from .widgets import ThreadPickWidget
class CommitFestFilterForm(forms.Form):
@@ -21,36 +21,60 @@ class CommitFestFilterForm(forms.Form):
def __init__(self, cf, *args, **kwargs):
super(CommitFestFilterForm, self).__init__(*args, **kwargs)
- self.fields['sortkey'].widget = forms.HiddenInput()
+ self.fields["sortkey"].widget = forms.HiddenInput()
- c = [(-1, '* All')] + list(PatchOnCommitFest._STATUS_CHOICES)
- self.fields['status'] = forms.ChoiceField(choices=c, required=False)
+ c = [(-1, "* All")] + list(PatchOnCommitFest._STATUS_CHOICES)
+ self.fields["status"] = forms.ChoiceField(choices=c, required=False)
q = Q(patch_author__commitfests=cf) | Q(patch_reviewer__commitfests=cf)
- userchoices = [(-1, '* All'), (-2, '* None'), (-3, '* Yourself')] + [(u.id, '%s %s (%s)' % (u.first_name, u.last_name, u.username)) for u in User.objects.filter(q).distinct().order_by('first_name', 'last_name')]
- self.fields['targetversion'] = forms.ChoiceField(choices=[('-1', '* All'), ('-2', '* None')] + [(v.id, v.version) for v in TargetVersion.objects.all()], required=False, label="Target version")
- self.fields['author'] = forms.ChoiceField(choices=userchoices, required=False)
- self.fields['reviewer'] = forms.ChoiceField(choices=userchoices, required=False)
-
- for f in ('status', 'author', 'reviewer',):
- self.fields[f].widget.attrs = {'class': 'input-medium'}
+ userchoices = [(-1, "* All"), (-2, "* None"), (-3, "* Yourself")] + [
+ (u.id, "%s %s (%s)" % (u.first_name, u.last_name, u.username))
+ for u in User.objects.filter(q)
+ .distinct()
+ .order_by("first_name", "last_name")
+ ]
+ self.fields["targetversion"] = forms.ChoiceField(
+ choices=[("-1", "* All"), ("-2", "* None")]
+ + [(v.id, v.version) for v in TargetVersion.objects.all()],
+ required=False,
+ label="Target version",
+ )
+ self.fields["author"] = forms.ChoiceField(choices=userchoices, required=False)
+ self.fields["reviewer"] = forms.ChoiceField(choices=userchoices, required=False)
+
+ for f in (
+ "status",
+ "author",
+ "reviewer",
+ ):
+ self.fields[f].widget.attrs = {"class": "input-medium"}
class PatchForm(forms.ModelForm):
selectize_multiple_fields = {
- 'authors': '/lookups/user',
- 'reviewers': '/lookups/user',
+ "authors": "/lookups/user",
+ "reviewers": "/lookups/user",
}
class Meta:
model = Patch
- exclude = ('commitfests', 'mailthreads', 'modified', 'lastmail', 'subscribers', )
+ exclude = (
+ "commitfests",
+ "mailthread_set",
+ "modified",
+ "lastmail",
+ "subscribers",
+ )
def __init__(self, *args, **kwargs):
super(PatchForm, self).__init__(*args, **kwargs)
- self.fields['authors'].help_text = 'Enter part of name to see list'
- self.fields['reviewers'].help_text = 'Enter part of name to see list'
- self.fields['committer'].label_from_instance = lambda x: '%s %s (%s)' % (x.user.first_name, x.user.last_name, x.user.username)
+ self.fields["authors"].help_text = "Enter part of name to see list"
+ self.fields["reviewers"].help_text = "Enter part of name to see list"
+ self.fields["committer"].label_from_instance = lambda x: "%s %s (%s)" % (
+ x.user.first_name,
+ x.user.last_name,
+ x.user.username,
+ )
# Selectize multiple fields -- don't pre-populate everything
for field, url in list(self.selectize_multiple_fields.items()):
@@ -64,89 +88,128 @@ def __init__(self, *args, **kwargs):
vals = [o.pk for o in getattr(self.instance, field).all()]
else:
vals = []
- if 'data' in kwargs and str(field) in kwargs['data']:
- vals.extend([x for x in kwargs['data'].getlist(field)])
- self.fields[field].widget.attrs['data-selecturl'] = url
- self.fields[field].queryset = self.fields[field].queryset.filter(pk__in=set(vals))
- self.fields[field].label_from_instance = lambda u: '{} ({})'.format(u.username, u.get_full_name())
-
+ if "data" in kwargs and str(field) in kwargs["data"]:
+ vals.extend([x for x in kwargs["data"].getlist(field)])
+ self.fields[field].widget.attrs["data-selecturl"] = url
+ self.fields[field].queryset = self.fields[field].queryset.filter(
+ pk__in=set(vals)
+ )
+ self.fields[field].label_from_instance = lambda u: "{} ({})".format(
+ u.username, u.get_full_name()
+ )
+
+
+class NewPatchForm(PatchForm):
+ # Put threadmsgid first
+ field_order = ["threadmsgid"]
+
+ threadmsgid = forms.CharField(
+ max_length=200,
+ required=True,
+ label="Specify thread msgid",
+ widget=ThreadPickWidget,
+ )
-class NewPatchForm(forms.ModelForm):
- threadmsgid = forms.CharField(max_length=200, required=True, label='Specify thread msgid', widget=ThreadPickWidget)
-# patchfile = forms.FileField(allow_empty_file=False, max_length=50000, label='or upload patch file', required=False, help_text='This may be supported sometime in the future, and would then autogenerate a mail to the hackers list. At such a time, the threadmsgid would no longer be required.')
+ def __init__(self, *args, **kwargs):
+ request = kwargs.pop("request", None)
+ super(NewPatchForm, self).__init__(*args, **kwargs)
- class Meta:
- model = Patch
- fields = ('name', 'topic', )
+ if request:
+ self.fields["authors"].queryset = User.objects.filter(pk=request.user.id)
+ self.fields["authors"].initial = [request.user.id]
def clean_threadmsgid(self):
try:
- _archivesAPI('/message-id.json/%s' % self.cleaned_data['threadmsgid'])
+ _archivesAPI("/message-id.json/%s" % self.cleaned_data["threadmsgid"])
except Http404:
raise ValidationError("Message not found in archives")
except Exception:
raise ValidationError("Error in API call to validate thread")
- return self.cleaned_data['threadmsgid']
+ return self.cleaned_data["threadmsgid"]
def _fetch_thread_choices(patch):
- for mt in patch.mailthread_set.order_by('-latestmessage'):
- ti = sorted(_archivesAPI('/message-id.json/%s' % mt.messageid), key=lambda x: x['date'], reverse=True)
- yield [mt.subject,
- [('%s,%s' % (mt.messageid, t['msgid']), 'From %s at %s' % (t['from'], t['date'])) for t in ti]]
+ for mt in patch.mailthread_set.order_by("-latestmessage"):
+ ti = sorted(
+ _archivesAPI("/message-id.json/%s" % mt.messageid),
+ key=lambda x: x["date"],
+ reverse=True,
+ )
+ yield [
+ mt.subject,
+ [
+ (
+ "%s,%s" % (mt.messageid, t["msgid"]),
+ "From %s at %s" % (t["from"], t["date"]),
+ )
+ for t in ti
+ ],
+ ]
review_state_choices = (
- (0, 'Tested'),
- (1, 'Passed'),
+ (0, "Tested"),
+ (1, "Passed"),
)
def reviewfield(label):
- return forms.MultipleChoiceField(choices=review_state_choices, label=label, widget=forms.CheckboxSelectMultiple, required=False)
+ return forms.MultipleChoiceField(
+ choices=review_state_choices,
+ label=label,
+ widget=forms.CheckboxSelectMultiple,
+ required=False,
+ )
class CommentForm(forms.Form):
- responseto = forms.ChoiceField(choices=[], required=True, label='In response to')
+ responseto = forms.ChoiceField(choices=[], required=True, label="In response to")
# Specific checkbox fields for reviews
- review_installcheck = reviewfield('make installcheck-world')
- review_implements = reviewfield('Implements feature')
- review_spec = reviewfield('Spec compliant')
- review_doc = reviewfield('Documentation')
+ review_installcheck = reviewfield("make installcheck-world")
+ review_implements = reviewfield("Implements feature")
+ review_spec = reviewfield("Spec compliant")
+ review_doc = reviewfield("Documentation")
message = forms.CharField(required=True, widget=forms.Textarea)
- newstatus = forms.ChoiceField(choices=PatchOnCommitFest.OPEN_STATUS_CHOICES(), label='New status')
+ newstatus = forms.ChoiceField(
+ choices=PatchOnCommitFest.OPEN_STATUS_CHOICES(), label="New status"
+ )
def __init__(self, patch, poc, is_review, *args, **kwargs):
super(CommentForm, self).__init__(*args, **kwargs)
self.is_review = is_review
- self.fields['responseto'].choices = _fetch_thread_choices(patch)
- self.fields['newstatus'].initial = poc.status
+ self.fields["responseto"].choices = _fetch_thread_choices(patch)
+ self.fields["newstatus"].initial = poc.status
if not is_review:
- del self.fields['review_installcheck']
- del self.fields['review_implements']
- del self.fields['review_spec']
- del self.fields['review_doc']
+ del self.fields["review_installcheck"]
+ del self.fields["review_implements"]
+ del self.fields["review_spec"]
+ del self.fields["review_doc"]
def clean_responseto(self):
try:
- (threadid, respid) = self.cleaned_data['responseto'].split(',')
+ (threadid, respid) = self.cleaned_data["responseto"].split(",")
self.thread = MailThread.objects.get(messageid=threadid)
self.respid = respid
except MailThread.DoesNotExist:
- raise ValidationError('Selected thread appears to no longer exist')
+ raise ValidationError("Selected thread appears to no longer exist")
except Exception:
- raise ValidationError('Invalid message selected')
- return self.cleaned_data['responseto']
+ raise ValidationError("Invalid message selected")
+ return self.cleaned_data["responseto"]
def clean(self):
if self.is_review:
for fn, f in self.fields.items():
- if fn.startswith('review_') and fn in self.cleaned_data:
- if '1' in self.cleaned_data[fn] and '0' not in self.cleaned_data[fn]:
- self.errors[fn] = (('Cannot pass a test without performing it!'),)
+ if fn.startswith("review_") and fn in self.cleaned_data:
+ if (
+ "1" in self.cleaned_data[fn]
+ and "0" not in self.cleaned_data[fn]
+ ):
+ self.errors[fn] = (
+ ("Cannot pass a test without performing it!"),
+ )
return self.cleaned_data
@@ -155,7 +218,7 @@ class BulkEmailForm(forms.Form):
authors = forms.CharField(required=False, widget=HiddenInput())
subject = forms.CharField(required=True)
body = forms.CharField(required=True, widget=forms.Textarea)
- confirm = forms.BooleanField(required=True, label='Check to confirm sending')
+ confirm = forms.BooleanField(required=True, label="Check to confirm sending")
def __init__(self, *args, **kwargs):
super(BulkEmailForm, self).__init__(*args, **kwargs)
diff --git a/pgcommitfest/commitfest/lookups.py b/pgcommitfest/commitfest/lookups.py
index 229459c6..76700912 100644
--- a/pgcommitfest/commitfest/lookups.py
+++ b/pgcommitfest/commitfest/lookups.py
@@ -1,22 +1,35 @@
-from django.http import HttpResponse, Http404
-from django.db.models import Q
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
+from django.db.models import Q
+from django.http import Http404, HttpResponse
import json
@login_required
def userlookup(request):
- query = request.GET.get('query', None)
+ query = request.GET.get("query", None)
if not query:
raise Http404()
users = User.objects.filter(
Q(is_active=True),
- Q(username__icontains=query) | Q(first_name__icontains=query) | Q(last_name__icontains=query),
+ Q(username__icontains=query)
+ | Q(first_name__icontains=query)
+ | Q(last_name__icontains=query),
)
- return HttpResponse(json.dumps({
- 'values': [{'id': u.id, 'value': '{} ({})'.format(u.username, u.get_full_name())} for u in users],
- }), content_type='application/json')
+ return HttpResponse(
+ json.dumps(
+ {
+ "values": [
+ {
+ "id": u.id,
+ "value": "{} ({})".format(u.username, u.get_full_name()),
+ }
+ for u in users
+ ],
+ }
+ ),
+ content_type="application/json",
+ )
diff --git a/pgcommitfest/commitfest/management/commands/send_notifications.py b/pgcommitfest/commitfest/management/commands/send_notifications.py
index cb2ef143..728c7f99 100644
--- a/pgcommitfest/commitfest/management/commands/send_notifications.py
+++ b/pgcommitfest/commitfest/management/commands/send_notifications.py
@@ -1,12 +1,10 @@
+from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import transaction
-from django.conf import settings
-
-from io import StringIO
from pgcommitfest.commitfest.models import PendingNotification
-from pgcommitfest.userprofile.models import UserProfile
from pgcommitfest.mailqueue.util import send_template_mail
+from pgcommitfest.userprofile.models import UserProfile
class Command(BaseCommand):
@@ -17,17 +15,24 @@ def handle(self, *args, **options):
# Django doesn't do proper group by in the ORM, so we have to
# build our own.
matches = {}
- for n in PendingNotification.objects.all().order_by('user', 'history__patch__id', 'history__id'):
+ for n in PendingNotification.objects.all().order_by(
+ "user", "history__patch__id", "history__id"
+ ):
if n.user.id not in matches:
- matches[n.user.id] = {'user': n.user, 'patches': {}}
- if n.history.patch.id not in matches[n.user.id]['patches']:
- matches[n.user.id]['patches'][n.history.patch.id] = {'patch': n.history.patch, 'entries': []}
- matches[n.user.id]['patches'][n.history.patch.id]['entries'].append(n.history)
+ matches[n.user.id] = {"user": n.user, "patches": {}}
+ if n.history.patch.id not in matches[n.user.id]["patches"]:
+ matches[n.user.id]["patches"][n.history.patch.id] = {
+ "patch": n.history.patch,
+ "entries": [],
+ }
+ matches[n.user.id]["patches"][n.history.patch.id]["entries"].append(
+ n.history
+ )
n.delete()
# Ok, now let's build emails from this
for v in matches.values():
- user = v['user']
+ user = v["user"]
email = user.email
try:
if user.userprofile and user.userprofile.notifyemail:
@@ -35,13 +40,14 @@ def handle(self, *args, **options):
except UserProfile.DoesNotExist:
pass
- send_template_mail(settings.NOTIFICATION_FROM,
- None,
- email,
- "PostgreSQL commitfest updates",
- 'mail/patch_notify.txt',
- {
- 'user': user,
- 'patches': v['patches'],
- },
- )
+ send_template_mail(
+ settings.NOTIFICATION_FROM,
+ None,
+ email,
+ "PostgreSQL commitfest updates",
+ "mail/patch_notify.txt",
+ {
+ "user": user,
+ "patches": v["patches"],
+ },
+ )
diff --git a/pgcommitfest/commitfest/migrations/0001_initial.py b/pgcommitfest/commitfest/migrations/0001_initial.py
index a58a5e18..aa688d7e 100644
--- a/pgcommitfest/commitfest/migrations/0001_initial.py
+++ b/pgcommitfest/commitfest/migrations/0001_initial.py
@@ -1,183 +1,327 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
-from django.db import migrations, models
from django.conf import settings
+from django.db import migrations, models
+
import pgcommitfest.commitfest.util
class Migration(migrations.Migration):
-
dependencies = [
- ('auth', '0006_require_contenttypes_0002'),
+ ("auth", "0006_require_contenttypes_0002"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
- name='CommitFest',
+ name="CommitFest",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('name', models.CharField(unique=True, max_length=100)),
- ('status', models.IntegerField(default=1, choices=[(1, 'Future'), (2, 'Open'), (3, 'In Progress'), (4, 'Closed')])),
- ('startdate', models.DateField(null=True, blank=True)),
- ('enddate', models.DateField(null=True, blank=True)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("name", models.CharField(unique=True, max_length=100)),
+ (
+ "status",
+ models.IntegerField(
+ default=1,
+ choices=[
+ (1, "Future"),
+ (2, "Open"),
+ (3, "In Progress"),
+ (4, "Closed"),
+ ],
+ ),
+ ),
+ ("startdate", models.DateField(null=True, blank=True)),
+ ("enddate", models.DateField(null=True, blank=True)),
],
options={
- 'ordering': ('-startdate',),
- 'verbose_name_plural': 'Commitfests',
+ "ordering": ("-startdate",),
+ "verbose_name_plural": "Commitfests",
},
),
migrations.CreateModel(
- name='Committer',
+ name="Committer",
fields=[
- ('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
- ('active', models.BooleanField(default=True)),
+ (
+ "user",
+ models.OneToOneField(
+ primary_key=True,
+ serialize=False,
+ to=settings.AUTH_USER_MODEL,
+ on_delete=models.CASCADE,
+ ),
+ ),
+ ("active", models.BooleanField(default=True)),
],
options={
- 'ordering': ('user__last_name', 'user__first_name'),
+ "ordering": ("user__last_name", "user__first_name"),
},
),
migrations.CreateModel(
- name='MailThread',
+ name="MailThread",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('messageid', models.CharField(unique=True, max_length=1000)),
- ('subject', models.CharField(max_length=500)),
- ('firstmessage', models.DateTimeField()),
- ('firstauthor', models.CharField(max_length=500)),
- ('latestmessage', models.DateTimeField()),
- ('latestauthor', models.CharField(max_length=500)),
- ('latestsubject', models.CharField(max_length=500)),
- ('latestmsgid', models.CharField(max_length=1000)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("messageid", models.CharField(unique=True, max_length=1000)),
+ ("subject", models.CharField(max_length=500)),
+ ("firstmessage", models.DateTimeField()),
+ ("firstauthor", models.CharField(max_length=500)),
+ ("latestmessage", models.DateTimeField()),
+ ("latestauthor", models.CharField(max_length=500)),
+ ("latestsubject", models.CharField(max_length=500)),
+ ("latestmsgid", models.CharField(max_length=1000)),
],
options={
- 'ordering': ('firstmessage',),
+ "ordering": ("firstmessage",),
},
),
migrations.CreateModel(
- name='MailThreadAnnotation',
+ name="MailThreadAnnotation",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('date', models.DateTimeField(auto_now_add=True)),
- ('msgid', models.CharField(max_length=1000)),
- ('annotationtext', models.TextField(max_length=2000)),
- ('mailsubject', models.CharField(max_length=500)),
- ('maildate', models.DateTimeField()),
- ('mailauthor', models.CharField(max_length=500)),
- ('mailthread', models.ForeignKey(to='commitfest.MailThread', on_delete=models.CASCADE)),
- ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("date", models.DateTimeField(auto_now_add=True)),
+ ("msgid", models.CharField(max_length=1000)),
+ ("annotationtext", models.TextField(max_length=2000)),
+ ("mailsubject", models.CharField(max_length=500)),
+ ("maildate", models.DateTimeField()),
+ ("mailauthor", models.CharField(max_length=500)),
+ (
+ "mailthread",
+ models.ForeignKey(
+ to="commitfest.MailThread", on_delete=models.CASCADE
+ ),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE
+ ),
+ ),
],
options={
- 'ordering': ('date',),
+ "ordering": ("date",),
},
),
migrations.CreateModel(
- name='MailThreadAttachment',
+ name="MailThreadAttachment",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('messageid', models.CharField(max_length=1000)),
- ('attachmentid', models.IntegerField()),
- ('filename', models.CharField(max_length=1000, blank=True)),
- ('date', models.DateTimeField()),
- ('author', models.CharField(max_length=500)),
- ('ispatch', models.BooleanField(null=True)),
- ('mailthread', models.ForeignKey(to='commitfest.MailThread', on_delete=models.CASCADE)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("messageid", models.CharField(max_length=1000)),
+ ("attachmentid", models.IntegerField()),
+ ("filename", models.CharField(max_length=1000, blank=True)),
+ ("date", models.DateTimeField()),
+ ("author", models.CharField(max_length=500)),
+ ("ispatch", models.BooleanField(null=True)),
+ (
+ "mailthread",
+ models.ForeignKey(
+ to="commitfest.MailThread", on_delete=models.CASCADE
+ ),
+ ),
],
options={
- 'ordering': ('-date',),
+ "ordering": ("-date",),
},
),
migrations.CreateModel(
- name='Patch',
+ name="Patch",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('name', models.CharField(max_length=500, verbose_name='Description')),
- ('wikilink', models.URLField(default='', null=False, blank=True)),
- ('gitlink', models.URLField(default='', null=False, blank=True)),
- ('created', models.DateTimeField(auto_now_add=True)),
- ('modified', models.DateTimeField()),
- ('lastmail', models.DateTimeField(null=True, blank=True)),
- ('authors', models.ManyToManyField(related_name='patch_author', to=settings.AUTH_USER_MODEL, blank=True)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("name", models.CharField(max_length=500, verbose_name="Description")),
+ ("wikilink", models.URLField(default="", null=False, blank=True)),
+ ("gitlink", models.URLField(default="", null=False, blank=True)),
+ ("created", models.DateTimeField(auto_now_add=True)),
+ ("modified", models.DateTimeField()),
+ ("lastmail", models.DateTimeField(null=True, blank=True)),
+ (
+ "authors",
+ models.ManyToManyField(
+ related_name="patch_author",
+ to=settings.AUTH_USER_MODEL,
+ blank=True,
+ ),
+ ),
],
options={
- 'verbose_name_plural': 'patches',
+ "verbose_name_plural": "patches",
},
bases=(models.Model, pgcommitfest.commitfest.util.DiffableModel),
),
migrations.CreateModel(
- name='PatchHistory',
+ name="PatchHistory",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('date', models.DateTimeField(auto_now_add=True)),
- ('what', models.CharField(max_length=500)),
- ('by', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
- ('patch', models.ForeignKey(to='commitfest.Patch', on_delete=models.CASCADE)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("date", models.DateTimeField(auto_now_add=True)),
+ ("what", models.CharField(max_length=500)),
+ (
+ "by",
+ models.ForeignKey(
+ to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE
+ ),
+ ),
+ (
+ "patch",
+ models.ForeignKey(to="commitfest.Patch", on_delete=models.CASCADE),
+ ),
],
options={
- 'ordering': ('-date',),
+ "ordering": ("-date",),
},
),
migrations.CreateModel(
- name='PatchOnCommitFest',
+ name="PatchOnCommitFest",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('enterdate', models.DateTimeField()),
- ('leavedate', models.DateTimeField(null=True, blank=True)),
- ('status', models.IntegerField(default=1, choices=[(1, 'Needs review'), (2, 'Waiting on Author'), (3, 'Ready for Committer'), (4, 'Committed'), (5, 'Moved to next CF'), (6, 'Rejected'), (7, 'Returned with feedback')])),
- ('commitfest', models.ForeignKey(to='commitfest.CommitFest', on_delete=models.CASCADE)),
- ('patch', models.ForeignKey(to='commitfest.Patch', on_delete=models.CASCADE)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("enterdate", models.DateTimeField()),
+ ("leavedate", models.DateTimeField(null=True, blank=True)),
+ (
+ "status",
+ models.IntegerField(
+ default=1,
+ choices=[
+ (1, "Needs review"),
+ (2, "Waiting on Author"),
+ (3, "Ready for Committer"),
+ (4, "Committed"),
+ (5, "Moved to next CF"),
+ (6, "Rejected"),
+ (7, "Returned with feedback"),
+ ],
+ ),
+ ),
+ (
+ "commitfest",
+ models.ForeignKey(
+ to="commitfest.CommitFest", on_delete=models.CASCADE
+ ),
+ ),
+ (
+ "patch",
+ models.ForeignKey(to="commitfest.Patch", on_delete=models.CASCADE),
+ ),
],
options={
- 'ordering': ('-commitfest__startdate',),
+ "ordering": ("-commitfest__startdate",),
},
),
migrations.CreateModel(
- name='PatchStatus',
+ name="PatchStatus",
fields=[
- ('status', models.IntegerField(serialize=False, primary_key=True)),
- ('statusstring', models.TextField(max_length=50)),
- ('sortkey', models.IntegerField(default=10)),
+ ("status", models.IntegerField(serialize=False, primary_key=True)),
+ ("statusstring", models.TextField(max_length=50)),
+ ("sortkey", models.IntegerField(default=10)),
],
),
migrations.CreateModel(
- name='Topic',
+ name="Topic",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('topic', models.CharField(max_length=100)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("topic", models.CharField(max_length=100)),
],
),
migrations.AddField(
- model_name='patch',
- name='commitfests',
- field=models.ManyToManyField(to='commitfest.CommitFest', through='commitfest.PatchOnCommitFest'),
+ model_name="patch",
+ name="commitfests",
+ field=models.ManyToManyField(
+ to="commitfest.CommitFest", through="commitfest.PatchOnCommitFest"
+ ),
),
migrations.AddField(
- model_name='patch',
- name='committer',
- field=models.ForeignKey(blank=True, to='commitfest.Committer', null=True, on_delete=models.CASCADE),
+ model_name="patch",
+ name="committer",
+ field=models.ForeignKey(
+ blank=True,
+ to="commitfest.Committer",
+ null=True,
+ on_delete=models.CASCADE,
+ ),
),
migrations.AddField(
- model_name='patch',
- name='reviewers',
- field=models.ManyToManyField(related_name='patch_reviewer', to=settings.AUTH_USER_MODEL, blank=True),
+ model_name="patch",
+ name="reviewers",
+ field=models.ManyToManyField(
+ related_name="patch_reviewer", to=settings.AUTH_USER_MODEL, blank=True
+ ),
),
migrations.AddField(
- model_name='patch',
- name='topic',
- field=models.ForeignKey(to='commitfest.Topic', on_delete=models.CASCADE),
+ model_name="patch",
+ name="topic",
+ field=models.ForeignKey(to="commitfest.Topic", on_delete=models.CASCADE),
),
migrations.AddField(
- model_name='mailthread',
- name='patches',
- field=models.ManyToManyField(to='commitfest.Patch'),
+ model_name="mailthread",
+ name="patches",
+ field=models.ManyToManyField(to="commitfest.Patch"),
),
migrations.AlterUniqueTogether(
- name='patchoncommitfest',
- unique_together=set([('patch', 'commitfest')]),
+ name="patchoncommitfest",
+ unique_together=set([("patch", "commitfest")]),
),
migrations.AlterUniqueTogether(
- name='mailthreadattachment',
- unique_together=set([('mailthread', 'messageid')]),
+ name="mailthreadattachment",
+ unique_together=set([("mailthread", "messageid")]),
),
]
diff --git a/pgcommitfest/commitfest/migrations/0002_notifications.py b/pgcommitfest/commitfest/migrations/0002_notifications.py
index 7fc2396e..450ddfbe 100644
--- a/pgcommitfest/commitfest/migrations/0002_notifications.py
+++ b/pgcommitfest/commitfest/migrations/0002_notifications.py
@@ -1,29 +1,48 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
-from django.db import migrations, models
from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
-
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ('commitfest', '0001_initial'),
+ ("commitfest", "0001_initial"),
]
operations = [
migrations.CreateModel(
- name='PendingNotification',
+ name="PendingNotification",
fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('history', models.ForeignKey(to='commitfest.PatchHistory', on_delete=models.CASCADE)),
- ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ serialize=False,
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ (
+ "history",
+ models.ForeignKey(
+ to="commitfest.PatchHistory", on_delete=models.CASCADE
+ ),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE
+ ),
+ ),
],
),
migrations.AddField(
- model_name='patch',
- name='subscribers',
- field=models.ManyToManyField(related_name='patch_subscriber', to=settings.AUTH_USER_MODEL, blank=True),
+ model_name="patch",
+ name="subscribers",
+ field=models.ManyToManyField(
+ related_name="patch_subscriber", to=settings.AUTH_USER_MODEL, blank=True
+ ),
),
]
diff --git a/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py b/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py
index e6cdea95..2f6a5f7d 100644
--- a/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py
+++ b/pgcommitfest/commitfest/migrations/0003_withdrawn_status.py
@@ -5,16 +5,27 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('commitfest', '0002_notifications'),
+ ("commitfest", "0002_notifications"),
]
operations = [
migrations.AlterField(
- model_name='patchoncommitfest',
- name='status',
- field=models.IntegerField(default=1, choices=[(1, 'Needs review'), (2, 'Waiting on Author'), (3, 'Ready for Committer'), (4, 'Committed'), (5, 'Moved to next CF'), (6, 'Rejected'), (7, 'Returned with feedback'), (8, 'Withdrawn')]),
+ model_name="patchoncommitfest",
+ name="status",
+ field=models.IntegerField(
+ default=1,
+ choices=[
+ (1, "Needs review"),
+ (2, "Waiting on Author"),
+ (3, "Ready for Committer"),
+ (4, "Committed"),
+ (5, "Moved to next CF"),
+ (6, "Rejected"),
+ (7, "Returned with feedback"),
+ (8, "Withdrawn"),
+ ],
+ ),
),
migrations.RunSQL("""
INSERT INTO commitfest_patchstatus (status, statusstring, sortkey) VALUES
@@ -28,5 +39,7 @@ class Migration(migrations.Migration):
(8,'Withdrawn', 50)
ON CONFLICT (status) DO UPDATE SET statusstring=excluded.statusstring, sortkey=excluded.sortkey;
"""),
- migrations.RunSQL("DELETE FROM commitfest_patchstatus WHERE status < 1 OR status > 8"),
+ migrations.RunSQL(
+ "DELETE FROM commitfest_patchstatus WHERE status < 1 OR status > 8"
+ ),
]
diff --git a/pgcommitfest/commitfest/migrations/0004_target_version.py b/pgcommitfest/commitfest/migrations/0004_target_version.py
index b307883d..ad546109 100644
--- a/pgcommitfest/commitfest/migrations/0004_target_version.py
+++ b/pgcommitfest/commitfest/migrations/0004_target_version.py
@@ -2,30 +2,45 @@
# Generated by Django 1.11.17 on 2019-02-06 19:43
from __future__ import unicode_literals
-from django.db import migrations, models
import django.db.models.deletion
+from django.db import migrations, models
class Migration(migrations.Migration):
-
dependencies = [
- ('commitfest', '0003_withdrawn_status'),
+ ("commitfest", "0003_withdrawn_status"),
]
operations = [
migrations.CreateModel(
- name='TargetVersion',
+ name="TargetVersion",
fields=[
- ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('version', models.CharField(max_length=8, unique=True)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("version", models.CharField(max_length=8, unique=True)),
],
options={
- 'ordering': ['-version', ],
+ "ordering": [
+ "-version",
+ ],
},
),
migrations.AddField(
- model_name='patch',
- name='targetversion',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='commitfest.TargetVersion', verbose_name='Target version'),
+ model_name="patch",
+ name="targetversion",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="commitfest.TargetVersion",
+ verbose_name="Target version",
+ ),
),
]
diff --git a/pgcommitfest/commitfest/migrations/0005_history_dateindex.py b/pgcommitfest/commitfest/migrations/0005_history_dateindex.py
index c7be8fcc..4316f212 100644
--- a/pgcommitfest/commitfest/migrations/0005_history_dateindex.py
+++ b/pgcommitfest/commitfest/migrations/0005_history_dateindex.py
@@ -6,15 +6,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('commitfest', '0004_target_version'),
+ ("commitfest", "0004_target_version"),
]
operations = [
migrations.AlterField(
- model_name='patchhistory',
- name='date',
+ model_name="patchhistory",
+ name="date",
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
diff --git a/pgcommitfest/commitfest/migrations/0006_cfbot_integration.py b/pgcommitfest/commitfest/migrations/0006_cfbot_integration.py
new file mode 100644
index 00000000..0a1ee6b8
--- /dev/null
+++ b/pgcommitfest/commitfest/migrations/0006_cfbot_integration.py
@@ -0,0 +1,120 @@
+# Generated by Django 4.2.17 on 2024-12-21 14:15
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("commitfest", "0005_history_dateindex"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="CfbotBranch",
+ fields=[
+ (
+ "patch",
+ models.OneToOneField(
+ on_delete=django.db.models.deletion.CASCADE,
+ primary_key=True,
+ related_name="cfbot_branch",
+ serialize=False,
+ to="commitfest.patch",
+ ),
+ ),
+ ("branch_id", models.IntegerField()),
+ ("branch_name", models.TextField()),
+ ("commit_id", models.TextField(blank=True, null=True)),
+ ("apply_url", models.TextField()),
+ (
+ "status",
+ models.TextField(
+ choices=[
+ ("testing", "Testing"),
+ ("finished", "Finished"),
+ ("failed", "Failed"),
+ ("timeout", "Timeout"),
+ ]
+ ),
+ ),
+ ("created", models.DateTimeField(auto_now_add=True)),
+ ("modified", models.DateTimeField(auto_now=True)),
+ ],
+ ),
+ migrations.CreateModel(
+ name="CfbotTask",
+ fields=[
+ ("id", models.BigAutoField(primary_key=True, serialize=False)),
+ ("task_id", models.TextField(unique=True)),
+ ("task_name", models.TextField()),
+ ("branch_id", models.IntegerField()),
+ ("position", models.IntegerField()),
+ (
+ "status",
+ models.TextField(
+ choices=[
+ ("CREATED", "Created"),
+ ("NEEDS_APPROVAL", "Needs Approval"),
+ ("TRIGGERED", "Triggered"),
+ ("EXECUTING", "Executing"),
+ ("FAILED", "Failed"),
+ ("COMPLETED", "Completed"),
+ ("SCHEDULED", "Scheduled"),
+ ("ABORTED", "Aborted"),
+ ("ERRORED", "Errored"),
+ ]
+ ),
+ ),
+ ("created", models.DateTimeField(auto_now_add=True)),
+ ("modified", models.DateTimeField(auto_now=True)),
+ (
+ "patch",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="cfbot_tasks",
+ to="commitfest.patch",
+ ),
+ ),
+ ],
+ ),
+ migrations.RunSQL(
+ """
+ CREATE TYPE cfbotbranch_status AS ENUM (
+ 'testing',
+ 'finished',
+ 'failed',
+ 'timeout'
+ );
+ """
+ ),
+ migrations.RunSQL(
+ """
+ CREATE TYPE cfbottask_status AS ENUM (
+ 'CREATED',
+ 'NEEDS_APPROVAL',
+ 'TRIGGERED',
+ 'EXECUTING',
+ 'FAILED',
+ 'COMPLETED',
+ 'SCHEDULED',
+ 'ABORTED',
+ 'ERRORED'
+ );
+ """
+ ),
+ migrations.RunSQL(
+ """
+ ALTER TABLE commitfest_cfbotbranch
+ ALTER COLUMN status TYPE cfbotbranch_status
+ USING status::cfbotbranch_status;
+ """
+ ),
+ migrations.RunSQL(
+ """
+ ALTER TABLE commitfest_cfbottask
+ ALTER COLUMN status TYPE cfbottask_status
+ USING status::cfbottask_status;
+ """
+ ),
+ ]
diff --git a/pgcommitfest/commitfest/migrations/0007_needs_rebase_emails.py b/pgcommitfest/commitfest/migrations/0007_needs_rebase_emails.py
new file mode 100644
index 00000000..cd3d291d
--- /dev/null
+++ b/pgcommitfest/commitfest/migrations/0007_needs_rebase_emails.py
@@ -0,0 +1,46 @@
+# Generated by Django 4.2.17 on 2024-12-25 11:17
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ("commitfest", "0006_cfbot_integration"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="needs_rebase_since",
+ field=models.DateTimeField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name="patchhistory",
+ name="by_cfbot",
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name="patchhistory",
+ name="by",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ migrations.AddConstraint(
+ model_name="patchhistory",
+ constraint=models.CheckConstraint(
+ check=models.Q(
+ models.Q(("by_cfbot", True), ("by__isnull", True)),
+ models.Q(("by_cfbot", False), ("by__isnull", False)),
+ _connector="OR",
+ ),
+ name="check_by",
+ ),
+ ),
+ ]
diff --git a/pgcommitfest/commitfest/migrations/0008_move_mail_thread_many_to_many.py b/pgcommitfest/commitfest/migrations/0008_move_mail_thread_many_to_many.py
new file mode 100644
index 00000000..de8af8c7
--- /dev/null
+++ b/pgcommitfest/commitfest/migrations/0008_move_mail_thread_many_to_many.py
@@ -0,0 +1,31 @@
+# Generated by Django 4.2.17 on 2025-01-25 11:14
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("commitfest", "0007_needs_rebase_emails"),
+ ]
+
+ operations = [
+ migrations.RunSQL(
+ migrations.RunSQL.noop,
+ reverse_sql=migrations.RunSQL.noop,
+ state_operations=[
+ migrations.RemoveField(
+ model_name="mailthread",
+ name="patches",
+ ),
+ migrations.AddField(
+ model_name="patch",
+ name="mailthread_set",
+ field=models.ManyToManyField(
+ db_table="commitfest_mailthread_patches",
+ related_name="patches",
+ to="commitfest.mailthread",
+ ),
+ ),
+ ],
+ )
+ ]
diff --git a/pgcommitfest/commitfest/migrations/0009_extra_branch_fields.py b/pgcommitfest/commitfest/migrations/0009_extra_branch_fields.py
new file mode 100644
index 00000000..7e53dd3a
--- /dev/null
+++ b/pgcommitfest/commitfest/migrations/0009_extra_branch_fields.py
@@ -0,0 +1,42 @@
+# Generated by Django 4.2.17 on 2025-01-31 11:47
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("commitfest", "0008_move_mail_thread_many_to_many"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="all_additions",
+ field=models.IntegerField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="all_deletions",
+ field=models.IntegerField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="first_additions",
+ field=models.IntegerField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="first_deletions",
+ field=models.IntegerField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="patch_count",
+ field=models.IntegerField(blank=True, null=True),
+ ),
+ migrations.AddField(
+ model_name="cfbotbranch",
+ name="version",
+ field=models.TextField(blank=True, null=True),
+ ),
+ ]
diff --git a/pgcommitfest/commitfest/models.py b/pgcommitfest/commitfest/models.py
index 28722f06..05956b83 100644
--- a/pgcommitfest/commitfest/models.py
+++ b/pgcommitfest/commitfest/models.py
@@ -1,18 +1,20 @@
-from django.db import models
from django.contrib.auth.models import User
+from django.db import models
from datetime import datetime
-from .util import DiffableModel
-
from pgcommitfest.userprofile.models import UserProfile
+from .util import DiffableModel
+
# We have few enough of these, and it's really the only thing we
# need to extend from the user model, so just create a separate
# class.
class Committer(models.Model):
- user = models.OneToOneField(User, null=False, blank=False, primary_key=True, on_delete=models.CASCADE)
+ user = models.OneToOneField(
+ User, null=False, blank=False, primary_key=True, on_delete=models.CASCADE
+ )
active = models.BooleanField(null=False, blank=False, default=True)
def __str__(self):
@@ -20,10 +22,14 @@ def __str__(self):
@property
def fullname(self):
- return "%s %s (%s)" % (self.user.first_name, self.user.last_name, self.user.username)
+ return "%s %s (%s)" % (
+ self.user.first_name,
+ self.user.last_name,
+ self.user.username,
+ )
class Meta:
- ordering = ('user__last_name', 'user__first_name')
+ ordering = ("user__last_name", "user__first_name")
class CommitFest(models.Model):
@@ -32,13 +38,15 @@ class CommitFest(models.Model):
STATUS_INPROGRESS = 3
STATUS_CLOSED = 4
_STATUS_CHOICES = (
- (STATUS_FUTURE, 'Future'),
- (STATUS_OPEN, 'Open'),
- (STATUS_INPROGRESS, 'In Progress'),
- (STATUS_CLOSED, 'Closed'),
+ (STATUS_FUTURE, "Future"),
+ (STATUS_OPEN, "Open"),
+ (STATUS_INPROGRESS, "In Progress"),
+ (STATUS_CLOSED, "Closed"),
)
name = models.CharField(max_length=100, blank=False, null=False, unique=True)
- status = models.IntegerField(null=False, blank=False, default=1, choices=_STATUS_CHOICES)
+ status = models.IntegerField(
+ null=False, blank=False, default=1, choices=_STATUS_CHOICES
+ )
startdate = models.DateField(blank=True, null=True)
enddate = models.DateField(blank=True, null=True)
@@ -64,8 +72,8 @@ def __str__(self):
return self.name
class Meta:
- verbose_name_plural = 'Commitfests'
- ordering = ('-startdate',)
+ verbose_name_plural = "Commitfests"
+ ordering = ("-startdate",)
class Topic(models.Model):
@@ -79,35 +87,56 @@ class TargetVersion(models.Model):
version = models.CharField(max_length=8, blank=False, null=False, unique=True)
class Meta:
- ordering = ['-version', ]
+ ordering = [
+ "-version",
+ ]
def __str__(self):
return self.version
class Patch(models.Model, DiffableModel):
- name = models.CharField(max_length=500, blank=False, null=False, verbose_name='Description')
+ name = models.CharField(
+ max_length=500, blank=False, null=False, verbose_name="Description"
+ )
topic = models.ForeignKey(Topic, blank=False, null=False, on_delete=models.CASCADE)
# One patch can be in multiple commitfests, if it has history
- commitfests = models.ManyToManyField(CommitFest, through='PatchOnCommitFest')
+ commitfests = models.ManyToManyField(CommitFest, through="PatchOnCommitFest")
# If there is a wiki page discussing this patch
- wikilink = models.URLField(blank=True, null=False, default='')
+ wikilink = models.URLField(blank=True, null=False, default="")
# If there is a git repo about this patch
- gitlink = models.URLField(blank=True, null=False, default='')
+ gitlink = models.URLField(blank=True, null=False, default="")
# Version targeted by this patch
- targetversion = models.ForeignKey(TargetVersion, blank=True, null=True, verbose_name="Target version", on_delete=models.CASCADE)
+ targetversion = models.ForeignKey(
+ TargetVersion,
+ blank=True,
+ null=True,
+ verbose_name="Target version",
+ on_delete=models.CASCADE,
+ )
- authors = models.ManyToManyField(User, related_name='patch_author', blank=True)
- reviewers = models.ManyToManyField(User, related_name='patch_reviewer', blank=True)
+ authors = models.ManyToManyField(User, related_name="patch_author", blank=True)
+ reviewers = models.ManyToManyField(User, related_name="patch_reviewer", blank=True)
- committer = models.ForeignKey(Committer, blank=True, null=True, on_delete=models.CASCADE)
+ committer = models.ForeignKey(
+ Committer, blank=True, null=True, on_delete=models.CASCADE
+ )
# Users to be notified when something happens
- subscribers = models.ManyToManyField(User, related_name='patch_subscriber', blank=True)
+ subscribers = models.ManyToManyField(
+ User, related_name="patch_subscriber", blank=True
+ )
+
+ mailthread_set = models.ManyToManyField(
+ "MailThread",
+ related_name="patches",
+ blank=False,
+ db_table="commitfest_mailthread_patches",
+ )
# Datestamps for tracking activity
created = models.DateTimeField(blank=False, null=False, auto_now_add=True)
@@ -118,24 +147,37 @@ class Patch(models.Model, DiffableModel):
lastmail = models.DateTimeField(blank=True, null=True)
map_manytomany_for_diff = {
- 'authors': 'authors_string',
- 'reviewers': 'reviewers_string',
+ "authors": "authors_string",
+ "reviewers": "reviewers_string",
}
+ def current_commitfest(self):
+ return self.commitfests.order_by("-startdate").first()
+
# Some accessors
@property
def authors_string(self):
- return ", ".join(["%s %s (%s)" % (a.first_name, a.last_name, a.username) for a in self.authors.all()])
+ return ", ".join(
+ [
+ "%s %s (%s)" % (a.first_name, a.last_name, a.username)
+ for a in self.authors.all()
+ ]
+ )
@property
def reviewers_string(self):
- return ", ".join(["%s %s (%s)" % (a.first_name, a.last_name, a.username) for a in self.reviewers.all()])
+ return ", ".join(
+ [
+ "%s %s (%s)" % (a.first_name, a.last_name, a.username)
+ for a in self.reviewers.all()
+ ]
+ )
@property
def history(self):
# Need to wrap this in a function to make sure it calls
# select_related() and doesn't generate a bazillion queries
- return self.patchhistory_set.select_related('by').all()
+ return self.patchhistory_set.select_related("by").all()
def set_modified(self, newmod=None):
# Set the modified date to newmod, but only if that's newer than
@@ -159,7 +201,7 @@ def __str__(self):
return self.name
class Meta:
- verbose_name_plural = 'patches'
+ verbose_name_plural = "patches"
class PatchOnCommitFest(models.Model):
@@ -176,24 +218,24 @@ class PatchOnCommitFest(models.Model):
STATUS_RETURNED = 7
STATUS_WITHDRAWN = 8
_STATUS_CHOICES = (
- (STATUS_REVIEW, 'Needs review'),
- (STATUS_AUTHOR, 'Waiting on Author'),
- (STATUS_COMMITTER, 'Ready for Committer'),
- (STATUS_COMMITTED, 'Committed'),
- (STATUS_NEXT, 'Moved to next CF'),
- (STATUS_REJECTED, 'Rejected'),
- (STATUS_RETURNED, 'Returned with feedback'),
- (STATUS_WITHDRAWN, 'Withdrawn'),
+ (STATUS_REVIEW, "Needs review"),
+ (STATUS_AUTHOR, "Waiting on Author"),
+ (STATUS_COMMITTER, "Ready for Committer"),
+ (STATUS_COMMITTED, "Committed"),
+ (STATUS_NEXT, "Moved to next CF"),
+ (STATUS_REJECTED, "Rejected"),
+ (STATUS_RETURNED, "Returned with feedback"),
+ (STATUS_WITHDRAWN, "Withdrawn"),
)
_STATUS_LABELS = (
- (STATUS_REVIEW, 'default'),
- (STATUS_AUTHOR, 'primary'),
- (STATUS_COMMITTER, 'info'),
- (STATUS_COMMITTED, 'success'),
- (STATUS_NEXT, 'warning'),
- (STATUS_REJECTED, 'danger'),
- (STATUS_RETURNED, 'danger'),
- (STATUS_WITHDRAWN, 'danger'),
+ (STATUS_REVIEW, "default"),
+ (STATUS_AUTHOR, "primary"),
+ (STATUS_COMMITTER, "info"),
+ (STATUS_COMMITTED, "success"),
+ (STATUS_NEXT, "warning"),
+ (STATUS_REJECTED, "danger"),
+ (STATUS_RETURNED, "danger"),
+ (STATUS_WITHDRAWN, "danger"),
)
OPEN_STATUSES = [STATUS_REVIEW, STATUS_AUTHOR, STATUS_COMMITTER]
@@ -202,11 +244,15 @@ def OPEN_STATUS_CHOICES(cls):
return [x for x in cls._STATUS_CHOICES if x[0] in cls.OPEN_STATUSES]
patch = models.ForeignKey(Patch, blank=False, null=False, on_delete=models.CASCADE)
- commitfest = models.ForeignKey(CommitFest, blank=False, null=False, on_delete=models.CASCADE)
+ commitfest = models.ForeignKey(
+ CommitFest, blank=False, null=False, on_delete=models.CASCADE
+ )
enterdate = models.DateTimeField(blank=False, null=False)
leavedate = models.DateTimeField(blank=True, null=True)
- status = models.IntegerField(blank=False, null=False, default=STATUS_REVIEW, choices=_STATUS_CHOICES)
+ status = models.IntegerField(
+ blank=False, null=False, default=STATUS_REVIEW, choices=_STATUS_CHOICES
+ )
@property
def is_closed(self):
@@ -217,56 +263,95 @@ def statusstring(self):
return [v for k, v in self._STATUS_CHOICES if k == self.status][0]
class Meta:
- unique_together = (('patch', 'commitfest',),)
- ordering = ('-commitfest__startdate', )
+ unique_together = (
+ (
+ "patch",
+ "commitfest",
+ ),
+ )
+ ordering = ("-commitfest__startdate",)
class PatchHistory(models.Model):
patch = models.ForeignKey(Patch, blank=False, null=False, on_delete=models.CASCADE)
- date = models.DateTimeField(blank=False, null=False, auto_now_add=True, db_index=True)
- by = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE)
+ date = models.DateTimeField(
+ blank=False, null=False, auto_now_add=True, db_index=True
+ )
+ by = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE)
+ by_cfbot = models.BooleanField(null=False, blank=False, default=False)
what = models.CharField(max_length=500, null=False, blank=False)
@property
def by_string(self):
+ if self.by_cfbot:
+ return "CFbot"
+
return "%s %s (%s)" % (self.by.first_name, self.by.last_name, self.by.username)
def __str__(self):
return "%s - %s" % (self.patch.name, self.date)
class Meta:
- ordering = ('-date', )
-
- def save_and_notify(self, prevcommitter=None,
- prevreviewers=None, prevauthors=None):
+ ordering = ("-date",)
+ constraints = [
+ models.CheckConstraint(
+ check=(models.Q(by_cfbot=True) & models.Q(by__isnull=True))
+ | (models.Q(by_cfbot=False) & models.Q(by__isnull=False)),
+ name="check_by",
+ ),
+ ]
+
+ def save_and_notify(
+ self,
+ prevcommitter=None,
+ prevreviewers=None,
+ prevauthors=None,
+ authors_only=False,
+ ):
# Save this model, and then trigger notifications if there are any. There are
# many different things that can trigger notifications, so try them all.
self.save()
recipients = []
- recipients.extend(self.patch.subscribers.all())
-
- # Current or previous committer wants all notifications
- try:
- if self.patch.committer and self.patch.committer.user.userprofile.notify_all_committer:
- recipients.append(self.patch.committer.user)
- except UserProfile.DoesNotExist:
- pass
-
- try:
- if prevcommitter and prevcommitter.user.userprofile.notify_all_committer:
- recipients.append(prevcommitter.user)
- except UserProfile.DoesNotExist:
- pass
-
- # Current or previous reviewers wants all notifications
- recipients.extend(self.patch.reviewers.filter(userprofile__notify_all_reviewer=True))
- if prevreviewers:
- # prevreviewers is a list
- recipients.extend(User.objects.filter(id__in=[p.id for p in prevreviewers], userprofile__notify_all_reviewer=True))
+ if not authors_only:
+ recipients.extend(self.patch.subscribers.all())
+
+ # Current or previous committer wants all notifications
+ try:
+ if (
+ self.patch.committer
+ and self.patch.committer.user.userprofile.notify_all_committer
+ ):
+ recipients.append(self.patch.committer.user)
+ except UserProfile.DoesNotExist:
+ pass
+
+ try:
+ if (
+ prevcommitter
+ and prevcommitter.user.userprofile.notify_all_committer
+ ):
+ recipients.append(prevcommitter.user)
+ except UserProfile.DoesNotExist:
+ pass
+
+ # Current or previous reviewers want all notifications
+ recipients.extend(
+ self.patch.reviewers.filter(userprofile__notify_all_reviewer=True)
+ )
+ if prevreviewers:
+ # prevreviewers is a list
+ recipients.extend(
+ User.objects.filter(
+ id__in=[p.id for p in prevreviewers],
+ userprofile__notify_all_reviewer=True,
+ )
+ )
# Current or previous authors wants all notifications
- recipients.extend(self.patch.authors.filter(userprofile__notify_all_author=True))
+ recipients.extend(
+ self.patch.authors.filter(userprofile__notify_all_author=True)
+ )
for u in set(recipients):
if u != self.by: # Don't notify for changes we make ourselves
@@ -284,7 +369,6 @@ class MailThread(models.Model):
# so we can keep track of when there was last a change on the
# thread in question.
messageid = models.CharField(max_length=1000, null=False, blank=False, unique=True)
- patches = models.ManyToManyField(Patch, blank=False)
subject = models.CharField(max_length=500, null=False, blank=False)
firstmessage = models.DateTimeField(null=False, blank=False)
firstauthor = models.CharField(max_length=500, null=False, blank=False)
@@ -297,11 +381,13 @@ def __str__(self):
return self.subject
class Meta:
- ordering = ('firstmessage', )
+ ordering = ("firstmessage",)
class MailThreadAttachment(models.Model):
- mailthread = models.ForeignKey(MailThread, null=False, blank=False, on_delete=models.CASCADE)
+ mailthread = models.ForeignKey(
+ MailThread, null=False, blank=False, on_delete=models.CASCADE
+ )
messageid = models.CharField(max_length=1000, null=False, blank=False)
attachmentid = models.IntegerField(null=False, blank=False)
filename = models.CharField(max_length=1000, null=False, blank=True)
@@ -310,12 +396,19 @@ class MailThreadAttachment(models.Model):
ispatch = models.BooleanField(null=True)
class Meta:
- ordering = ('-date',)
- unique_together = (('mailthread', 'messageid',), )
+ ordering = ("-date",)
+ unique_together = (
+ (
+ "mailthread",
+ "messageid",
+ ),
+ )
class MailThreadAnnotation(models.Model):
- mailthread = models.ForeignKey(MailThread, null=False, blank=False, on_delete=models.CASCADE)
+ mailthread = models.ForeignKey(
+ MailThread, null=False, blank=False, on_delete=models.CASCADE
+ )
date = models.DateTimeField(null=False, blank=False, auto_now_add=True)
user = models.ForeignKey(User, null=False, blank=False, on_delete=models.CASCADE)
msgid = models.CharField(max_length=1000, null=False, blank=False)
@@ -326,10 +419,14 @@ class MailThreadAnnotation(models.Model):
@property
def user_string(self):
- return "%s %s (%s)" % (self.user.first_name, self.user.last_name, self.user.username)
+ return "%s %s (%s)" % (
+ self.user.first_name,
+ self.user.last_name,
+ self.user.username,
+ )
class Meta:
- ordering = ('date', )
+ ordering = ("date",)
class PatchStatus(models.Model):
@@ -339,5 +436,79 @@ class PatchStatus(models.Model):
class PendingNotification(models.Model):
- history = models.ForeignKey(PatchHistory, blank=False, null=False, on_delete=models.CASCADE)
+ history = models.ForeignKey(
+ PatchHistory, blank=False, null=False, on_delete=models.CASCADE
+ )
user = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE)
+
+
+class CfbotBranch(models.Model):
+ STATUS_CHOICES = [
+ ("testing", "Testing"),
+ ("finished", "Finished"),
+ ("failed", "Failed"),
+ ("timeout", "Timeout"),
+ ]
+
+ patch = models.OneToOneField(
+ Patch, on_delete=models.CASCADE, related_name="cfbot_branch", primary_key=True
+ )
+ branch_id = models.IntegerField(null=False)
+ branch_name = models.TextField(null=False)
+ commit_id = models.TextField(null=True, blank=True)
+ apply_url = models.TextField(null=False)
+ # Actually a postgres enum column
+ status = models.TextField(choices=STATUS_CHOICES, null=False)
+ needs_rebase_since = models.DateTimeField(null=True, blank=True)
+ created = models.DateTimeField(auto_now_add=True)
+ modified = models.DateTimeField(auto_now=True)
+ version = models.TextField(null=True, blank=True)
+ patch_count = models.IntegerField(null=True, blank=True)
+ first_additions = models.IntegerField(null=True, blank=True)
+ first_deletions = models.IntegerField(null=True, blank=True)
+ all_additions = models.IntegerField(null=True, blank=True)
+ all_deletions = models.IntegerField(null=True, blank=True)
+
+ def save(self, *args, **kwargs):
+ """Only used by the admin panel to save empty commit id as NULL
+
+ The actual cfbot webhook doesn't use the django ORM to save the data.
+ """
+
+ if not self.commit_id:
+ self.commit_id = None
+ super(CfbotBranch, self).save(*args, **kwargs)
+
+
+class CfbotTask(models.Model):
+ STATUS_CHOICES = [
+ ("CREATED", "Created"),
+ ("NEEDS_APPROVAL", "Needs Approval"),
+ ("TRIGGERED", "Triggered"),
+ ("EXECUTING", "Executing"),
+ ("FAILED", "Failed"),
+ ("COMPLETED", "Completed"),
+ ("SCHEDULED", "Scheduled"),
+ ("ABORTED", "Aborted"),
+ ("ERRORED", "Errored"),
+ ]
+
+ # This id is only used by Django. Using text type for primary keys has
+ # historically caused problems.
+ id = models.BigAutoField(primary_key=True)
+ # This is the id used by the external CI system. Currently with CirrusCI
+ # this is an integer, and thus we could probably store it as such. But
+ # given that we might need to change CI providers at some point, and that
+ # CI provider might use e.g. UUIDs, we prefer to consider the format of the
+ # ID opaque and store it as text.
+ task_id = models.TextField(unique=True)
+ task_name = models.TextField(null=False)
+ patch = models.ForeignKey(
+ Patch, on_delete=models.CASCADE, related_name="cfbot_tasks"
+ )
+ branch_id = models.IntegerField(null=False)
+ position = models.IntegerField(null=False)
+ # Actually a postgres enum column
+ status = models.TextField(choices=STATUS_CHOICES, null=False)
+ created = models.DateTimeField(auto_now_add=True)
+ modified = models.DateTimeField(auto_now=True)
diff --git a/pgcommitfest/commitfest/reports.py b/pgcommitfest/commitfest/reports.py
index 88f51a9f..e4191e16 100644
--- a/pgcommitfest/commitfest/reports.py
+++ b/pgcommitfest/commitfest/reports.py
@@ -1,8 +1,7 @@
-from django.shortcuts import render, get_object_or_404
-from django.http import Http404
-from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.db import connection
+from django.http import Http404
+from django.shortcuts import get_object_or_404, render
from .models import CommitFest
@@ -14,7 +13,8 @@ def authorstats(request, cfid):
raise Http404("Only CF Managers can do that.")
cursor = connection.cursor()
- cursor.execute("""
+ cursor.execute(
+ """
WITH patches(id,name) AS (
SELECT p.id, name
FROM commitfest_patch p
@@ -37,13 +37,20 @@ def authorstats(request, cfid):
INNER JOIN auth_user u ON u.id=COALESCE(authors.userid, reviewers.userid)
ORDER BY last_name, first_name
""",
- {
- 'cid': cf.id,
- })
+ {
+ "cid": cf.id,
+ },
+ )
- return render(request, 'report_authors.html', {
- 'cf': cf,
- 'report': cursor.fetchall(),
- 'title': 'Author stats',
- 'breadcrumbs': [{'title': cf.title, 'href': '/%s/' % cf.pk}, ],
- })
+ return render(
+ request,
+ "report_authors.html",
+ {
+ "cf": cf,
+ "report": cursor.fetchall(),
+ "title": "Author stats",
+ "breadcrumbs": [
+ {"title": cf.title, "href": "/%s/" % cf.pk},
+ ],
+ },
+ )
diff --git a/pgcommitfest/commitfest/templates/base.html b/pgcommitfest/commitfest/templates/base.html
index 4a6ba990..382c43bc 100644
--- a/pgcommitfest/commitfest/templates/base.html
+++ b/pgcommitfest/commitfest/templates/base.html
@@ -6,7 +6,7 @@
-
+
{%block extrahead%}{%endblock%}
{%if rss_alternate%} {%endif%}
@@ -43,6 +43,6 @@ {{title}}
-
+
{%block morescript%}{%endblock%}