diff --git a/.github/workflows/algorithm_validation_tests.yml b/.github/workflows/algorithm_validation_tests.yml
index ce563d162..e4e7fb0c5 100644
--- a/.github/workflows/algorithm_validation_tests.yml
+++ b/.github/workflows/algorithm_validation_tests.yml
@@ -120,8 +120,11 @@ jobs:
         with:
           run: cat /tmp/exareme2/localworker1.out

-      - name: Run algorithm validation tests
-        run: poetry run pytest tests/algorithm_validation_tests --verbosity=4 -n 16 --reruns 3 --reruns-delay 5
+      - name: Run Flower algorithm validation tests
+        run: poetry run pytest tests/algorithm_validation_tests/flower/ --verbosity=4 -n 2 --reruns 6 --reruns-delay 5
+
+      - name: Run Exareme2 algorithm validation tests
+        run: poetry run pytest tests/algorithm_validation_tests/exareme2 --verbosity=4 -n 16 --reruns 3 --reruns-delay 5

   run_tests_in_five_nodes:
     runs-on: ubuntu-latest
@@ -242,5 +245,8 @@ jobs:
         with:
           run: cat /tmp/exareme2/localworker1.out

-      - name: Run algorithm validation tests
-        run: poetry run pytest tests/algorithm_validation_tests --verbosity=4 -n 16 --reruns 3 --reruns-delay 5 -k "input1 and not input1-" # run tests 10-19
+      - name: Run Flower algorithm validation tests
+        run: poetry run pytest tests/algorithm_validation_tests/flower/test_logistic_regression.py -n 2 --verbosity=4 --reruns 6 --reruns-delay 5
+
+      - name: Run Exareme2 algorithm validation tests
+        run: poetry run pytest tests/algorithm_validation_tests/exareme2/ --verbosity=4 -n 16 -k "input1 and not input1-" # run tests 10-19
diff --git a/.github/workflows/prod_env_tests.yml b/.github/workflows/prod_env_tests.yml
index 0f54bc77f..9cb191ef4 100644
--- a/.github/workflows/prod_env_tests.yml
+++ b/.github/workflows/prod_env_tests.yml
@@ -162,7 +162,6 @@ jobs:
           kubectl label node master master=true
           kubectl label node localworker1 worker=true
           kubectl label node localworker2 worker=true
-          kubectl label node localworker3 worker=true

       - name: Get container disk space
         run: df -h
@@ -201,98 +200,86 @@ jobs:
       - name: Deploy Helm
         run: helm install exareme2 kubernetes/ --debug

-      - name: Wait for MONETDB container to start
-        uses: jakejarvis/wait-action@master
-        with:
-          time: '60s'
+      - name: Wait for pods to become healthy
+        run: timeout 300 bash -c 'while true; do if kubectl get pods --no-headers | awk '\''{if ($2 != "1/1" && $2 != "2/2" && $2 != "3/3" && $2 != "4/4") exit 1;}'\''; then echo "All pods are ready!"; break; else kubectl get pods -o wide; sleep 20; fi done'

       - name: Initialize MONETDB from mipdb container
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb init --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb init --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb init --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb init'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb init'

       - name: Load dementia data model into localworkers
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/dementia_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/dementia_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/dementia_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-
-      - name: Load dementia dataset csvs with suffix '0' into localworker 1
-        run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd0.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi0.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata0.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/dementia_v_0_1/CDEsMetadata.json'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/dementia_v_0_1/CDEsMetadata.json'

-      - name: Load dementia dataset csvs with suffix '1,3,5,7,9' into localworker 2
+      - name: Load dementia dataset csvs with even-numbered suffixes into localworker 1
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd1.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd3.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd5.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd7.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd9.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi1.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi3.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi5.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi7.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi9.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata1.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata3.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata5.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata7.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata9.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-
-      - name: Load dementia datasets csvs with suffix '2,4,6,8' into localworker 3
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd0.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi0.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata0.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd2.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi2.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata2.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd4.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi4.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata4.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd6.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi6.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata6.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd8.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi8.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata8.csv -d dementia -v 0.1'
+
+      - name: Load dementia dataset csvs with odd-numbered suffixes into localworker 2
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd2.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd4.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd6.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd8.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi2.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi4.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi6.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi8.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata2.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata4.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata6.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata8.csv -d dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd1.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi1.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata1.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd3.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi3.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata3.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd5.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi5.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata5.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd7.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi7.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata7.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/edsd9.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/ppmi9.csv -d dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/dementia_v_0_1/desd-synthdata9.csv -d dementia -v 0.1'
       - name: Load tbi data model into localworkers
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/tbi_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/tbi_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/tbi_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/tbi_v_0_1/CDEsMetadata.json'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/tbi_v_0_1/CDEsMetadata.json'

-      - name: Load tbi dataset csvs with suffix '0' into localworker 1
+      - name: Load tbi dataset csvs with even-numbered suffixes into localworker 1
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi0.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi0.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi2.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi4.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi6.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi8.csv -d tbi -v 0.1'

-      - name: Load tbi dataset csvs with suffix '1,3,5,7,9' into localworker 2
+      - name: Load tbi dataset csvs with odd-numbered suffixes into localworker 2
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi1.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi3.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi5.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi7.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi9.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-
-      - name: Load tbi datasets csvs with suffix '2,4,6,8' into localworker 3
-        run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi2.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi4.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi6.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi8.csv -d tbi -v 0.1 --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi1.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi3.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi5.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi7.csv -d tbi -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/tbi_v_0_1/dummy_tbi9.csv -d tbi -v 0.1'

       - name: Load longitudinal dementia data model into localworkers
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/longitudinal_dementia_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/longitudinal_dementia_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/longitudinal_dementia_v_0_1/CDEsMetadata.json --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/longitudinal_dementia_v_0_1/CDEsMetadata.json'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-data-model /opt/data/longitudinal_dementia_v_0_1/CDEsMetadata.json'

       - name: Load longitudinal dementia datasets csvs into localworkers
         run: |
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/longitudinal_dementia_v_0_1/longitudinal_dementia0.csv -d longitudinal_dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/longitudinal_dementia_v_0_1/longitudinal_dementia1.csv -d longitudinal_dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
-          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[2].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/longitudinal_dementia_v_0_1/longitudinal_dementia2.csv -d longitudinal_dementia -v 0.1 --ip $DB_IP --port $DB_PORT'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[0].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/longitudinal_dementia_v_0_1/longitudinal_dementia0.csv -d longitudinal_dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/longitudinal_dementia_v_0_1/longitudinal_dementia1.csv -d longitudinal_dementia -v 0.1'
+          kubectl exec $(kubectl get pods -l=nodeType=localworker -o json | jq -r '.items[1].metadata.name') -c db-importer -- sh -c 'mipdb add-dataset /opt/data/longitudinal_dementia_v_0_1/longitudinal_dementia2.csv -d longitudinal_dementia -v 0.1'

       - name: Controller logs
         run: kubectl logs -l app=exareme2-controller --tail -1
@@ -318,10 +305,8 @@ jobs:
         with:
           run: kubectl logs -l nodeType=localworker -c worker --tail -1

-      - name: Wait for CONTROLLER container to become ready
-        uses: jakejarvis/wait-action@master
-        with:
-          time: '60s'
+      - name: Run Worker Landscape Aggregator update
+        run: curl -X POST "http://172.17.0.1:5000/wla"

       - name: Run Healthcheck
         run: curl "http://172.17.0.1:5000/healthcheck"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3bfc4afa9..c3a8f46e1 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,7 +11,7 @@ repos:
       - id: debug-statements

   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.7.0
    hooks:
      - id: black
@@ -21,7 +21,7 @@ repos:
      - id: mdformat

   - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
+    rev: 5.12.0
    hooks:
      - id: isort
        args: ["--profile", "black", "--force-single-line-imports"]
diff --git a/README.md b/README.md
index 0ff85a592..e036dc075 100644
--- a/README.md
+++ b/README.md
@@ -53,6 +53,7 @@
 algorithm_folders = "./exareme2/algorithms/exareme2,./exareme2/algorithms/flower,./tests/algorithms"

 worker_landscape_aggregator_update_interval = 30
+flower_execution_timeout = 30
 celery_tasks_timeout = 20
 celery_cleanup_task_timeout=2
 celery_run_udf_task_timeout = 120
diff --git a/exareme2/algorithms/flower/flower_data_processing.py b/exareme2/algorithms/flower/flower_data_processing.py
new file mode 100644
index 000000000..2d24a4db8
--- /dev/null
+++ b/exareme2/algorithms/flower/flower_data_processing.py
@@ -0,0 +1,97 @@
+import json
+import os
+from pathlib import Path
+from typing import List
+from typing import Optional
+
+import pandas as pd
+import pymonetdb
+import requests
+from pydantic import BaseModel
+from sklearn import preprocessing
+from sklearn.impute import SimpleImputer
+
+# Constants for project directories and environment configurations
+PROJECT_ROOT = Path(__file__).resolve().parents[3]
+
+
+class Inputdata(BaseModel):
+    data_model: str
+    datasets: List[str]
+    filters: Optional[dict]
+    y: Optional[List[str]]
+    x: Optional[List[str]]
+
+
+def fetch_data(data_model, datasets, from_db=False) -> pd.DataFrame:
+    return (
+        _fetch_data_from_db(data_model, datasets)
+        if from_db
+        else _fetch_data_from_csv(data_model, datasets)
+    )
+
+
+def _fetch_data_from_db(data_model, datasets) -> pd.DataFrame:
+    query = f'SELECT * FROM "{data_model}"."primary_data"'
+    conn = pymonetdb.connect(
+        hostname=os.getenv("MONETDB_IP"),
+        port=int(os.getenv("MONETDB_PORT")),
+        username=os.getenv("MONETDB_USERNAME"),
+        password=os.getenv("MONETDB_PASSWORD"),
+        database=os.getenv("MONETDB_DB"),
+    )
+    df = pd.read_sql(query, conn)
+    conn.close()
+    df = df[df["dataset"].isin(datasets)]
+    return df
+
+
+def _fetch_data_from_csv(data_model, datasets) -> pd.DataFrame:
+    data_folder = (
+        PROJECT_ROOT / "tests" / "test_data" / f"{data_model.split(':')[0]}_v_0_1"
+    )
+    dataframes = [
+        pd.read_csv(data_folder / f"{dataset}.csv")
+        for dataset in datasets
+        if (data_folder / f"{dataset}.csv").exists()
+    ]
+    return pd.concat(dataframes, ignore_index=True)
+
+
+def preprocess_data(inputdata, full_data):
+    # Ensure x and y are specified and correct
+    if not inputdata.x or not inputdata.y:
+        raise ValueError("Input features 'x' and labels 'y' must be specified")
+
+    # Select features and target based on inputdata configuration
+    features = full_data[inputdata.x]  # This should be a DataFrame
+    target = full_data[inputdata.y].values.ravel()  # Flatten the array if it's 2D
+
+    # Impute missing values for features
+    imputer = SimpleImputer(strategy="most_frequent")
+    features_imputed = imputer.fit_transform(features)
+
+    # Encode target variable
+    label_encoder = preprocessing.LabelEncoder()
+    label_encoder.fit(get_enumerations(inputdata.data_model, inputdata.y[0]))
+    y_train = label_encoder.transform(target)
+
+    return features_imputed, y_train
+
+
+def post_result(result: dict) -> None:
+    url = "http://127.0.0.1:5000/flower/result"
+    headers = {"Content-type": "application/json", "Accept": "text/plain"}
+    requests.post(url, data=json.dumps(result), headers=headers)
+
+
+def get_input() -> Inputdata:
+    response = requests.get("http://127.0.0.1:5000/flower/input")
+    return Inputdata.parse_raw(response.text)
+
+
+def get_enumerations(data_model, variable_name):
+    response = requests.get("http://127.0.0.1:5000/cdes_metadata")
+    cdes_metadata = json.loads(response.text)
+    enumerations = cdes_metadata[data_model][variable_name]["enumerations"]
+    return [code for code, label in enumerations.items()]
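For orientation, these helpers compose as follows on a worker: `get_input` pulls the job's `Inputdata` from the controller, `fetch_data` loads the rows (CSV fallback for tests, MonetDB in deployment), and `preprocess_data` imputes and label-encodes. A minimal sketch, with illustrative dataset and variable names (any dementia CDEs would do), assuming the test CSVs exist under `tests/test_data/dementia_v_0_1/`:

```python
from exareme2.algorithms.flower.flower_data_processing import Inputdata
from exareme2.algorithms.flower.flower_data_processing import fetch_data
from exareme2.algorithms.flower.flower_data_processing import preprocess_data

# Build the input locally instead of calling get_input(); field values are examples.
inputdata = Inputdata(
    data_model="dementia:0.1",
    datasets=["edsd0", "ppmi0"],
    filters=None,
    y=["alzheimerbroadcategory"],
    x=["lefthippocampus", "righthippocampus"],
)

# from_db=False resolves tests/test_data/dementia_v_0_1/<dataset>.csv and
# silently skips any dataset whose CSV file is missing.
full_data = fetch_data(inputdata.data_model, inputdata.datasets, from_db=False)

# preprocess_data calls get_enumerations(), which queries the controller's
# /cdes_metadata endpoint, so this step needs a running controller.
X_train, y_train = preprocess_data(inputdata, full_data)
```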
diff --git a/exareme2/algorithms/flower/logistic_regression.json b/exareme2/algorithms/flower/logistic_regression.json
new file mode 100644
index 000000000..959063d3d
--- /dev/null
+++ b/exareme2/algorithms/flower/logistic_regression.json
@@ -0,0 +1,37 @@
+{
+    "name": "logistic_regression",
+    "desc": "Statistical method that models the relationship between a dependent binary variable and one or more independent variables by fitting a binary logistic curve to the observed data.",
+    "label": "Logistic Regression on Flower",
+    "enabled": true,
+    "type": "flower",
+    "inputdata": {
+        "y": {
+            "label": "Variable (dependent)",
+            "desc": "A unique nominal variable. The variable is converted to binary by assigning 1 to the positive class and 0 to all other classes.",
+            "types": [
+                "int",
+                "text"
+            ],
+            "stattypes": [
+                "nominal"
+            ],
+            "notblank": true,
+            "multiple": false
+        },
+        "x": {
+            "label": "Covariates (independent)",
+            "desc": "One or more variables. Can be numerical or nominal. For nominal variables dummy encoding is used.",
+            "types": [
+                "real",
+                "int",
+                "text"
+            ],
+            "stattypes": [
+                "numerical",
+                "nominal"
+            ],
+            "notblank": true,
+            "multiple": true
+        }
+    }
+}
diff --git a/exareme2/algorithms/flower/logistic_regression/__init__.py b/exareme2/algorithms/flower/logistic_regression/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/exareme2/algorithms/flower/logistic_regression/client.py b/exareme2/algorithms/flower/logistic_regression/client.py
new file mode 100644
index 000000000..72620d9d4
--- /dev/null
+++ b/exareme2/algorithms/flower/logistic_regression/client.py
@@ -0,0 +1,49 @@
+import os
+import warnings
+
+import flwr as fl
+from sklearn.linear_model import LogisticRegression
+from sklearn.metrics import log_loss
+from utils import get_model_parameters
+from utils import set_initial_params
+from utils import set_model_params
+
+from exareme2.algorithms.flower.flower_data_processing import fetch_data
+from exareme2.algorithms.flower.flower_data_processing import get_input
+from exareme2.algorithms.flower.flower_data_processing import preprocess_data
+
+
+class LogisticRegressionClient(fl.client.NumPyClient):
+    def __init__(self, model, X_train, y_train):
+        self.model = model
+        self.X_train = X_train
+        self.y_train = y_train
+
+    def get_parameters(self, **kwargs):  # Now accepts any keyword arguments
+        return get_model_parameters(self.model)
+
+    def fit(self, parameters, config):
+        set_model_params(self.model, parameters)
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            self.model.fit(self.X_train, self.y_train)
+        return get_model_parameters(self.model), len(self.X_train), {}
+
+    def evaluate(self, parameters, config):
+        set_model_params(self.model, parameters)
+        loss = log_loss(self.y_train, self.model.predict_proba(self.X_train))
+        accuracy = self.model.score(self.X_train, self.y_train)
+        return loss, len(self.X_train), {"accuracy": accuracy}
+
+
+if __name__ == "__main__":
+    model = LogisticRegression(penalty="l2", max_iter=1, warm_start=True)
+    inputdata = get_input()
+    full_data = fetch_data(inputdata.data_model, inputdata.datasets, from_db=True)
+    X_train, y_train = preprocess_data(inputdata, full_data)
+    set_initial_params(model, X_train, full_data, inputdata)
+
+    client = LogisticRegressionClient(model, X_train, y_train)
+    fl.client.start_client(
+        server_address=os.environ["SERVER_ADDRESS"], client=client.to_client()
+    )
diff --git a/exareme2/algorithms/flower/logistic_regression/server.py b/exareme2/algorithms/flower/logistic_regression/server.py
new file mode 100644
index 000000000..4e91a5817
--- /dev/null
+++ b/exareme2/algorithms/flower/logistic_regression/server.py
@@ -0,0 +1,51 @@
+import os
+
+import flwr as fl
+from sklearn.linear_model import LogisticRegression
+from sklearn.metrics import log_loss
+from utils import set_initial_params
+from utils import set_model_params
+
+from exareme2.algorithms.flower.flower_data_processing import fetch_data
+from exareme2.algorithms.flower.flower_data_processing import get_input
+from exareme2.algorithms.flower.flower_data_processing import post_result
+from exareme2.algorithms.flower.flower_data_processing import preprocess_data
+
+# TODO: NUM_OF_ROUNDS should become a parameter of the algorithm and be set on the AlgorithmRequestDTO
+NUM_OF_ROUNDS = 5
+
+
+def fit_round(server_round: int):
+    """Configures the next round of training."""
+    return {"server_round": server_round}
+
+
+def get_evaluate_fn(model, X_test, y_test):
+    def evaluate(server_round, parameters, config):
+        set_model_params(model, parameters)
+        loss = log_loss(y_test, model.predict_proba(X_test))
+        accuracy = model.score(X_test, y_test)
+        if server_round == NUM_OF_ROUNDS:
+            post_result({"accuracy": accuracy})
+            print({"accuracy": accuracy})
+        return loss, {"accuracy": accuracy}
+
+    return evaluate
+
+
+if __name__ == "__main__":
+    model = LogisticRegression()
+    inputdata = get_input()
+    full_data = fetch_data(inputdata.data_model, inputdata.datasets)
+    X_train, y_train = preprocess_data(inputdata, full_data)
+    set_initial_params(model, X_train, full_data, inputdata)
+    strategy = fl.server.strategy.FedAvg(
+        min_available_clients=int(os.environ["NUMBER_OF_CLIENTS"]),
+        evaluate_fn=get_evaluate_fn(model, X_train, y_train),
+        on_fit_config_fn=fit_round,
+    )
+    fl.server.start_server(
+        server_address=os.environ["SERVER_ADDRESS"],
+        strategy=strategy,
+        config=fl.server.ServerConfig(num_rounds=NUM_OF_ROUNDS),
+    )
diff --git a/exareme2/algorithms/flower/logistic_regression/utils.py b/exareme2/algorithms/flower/logistic_regression/utils.py
new file mode 100644
index 000000000..605a04af2
--- /dev/null
+++ b/exareme2/algorithms/flower/logistic_regression/utils.py
@@ -0,0 +1,23 @@
+import numpy as np
+
+
+def get_model_parameters(model):
+    params = [model.coef_]
+    if model.fit_intercept:
+        params.append(model.intercept_)
+    return params
+
+
+def set_model_params(model, params):
+    model.coef_ = params[0]
+    if model.fit_intercept:
+        model.intercept_ = params[1]
+
+
+def set_initial_params(model, X_train, full_data, flower_inputdata):
+    model.classes_ = np.array(
+        [i for i in range(len(np.unique(full_data[flower_inputdata.y])))]
+    )
+    model.coef_ = np.zeros((len(model.classes_), X_train.shape[1]))
+    if model.fit_intercept:
+        model.intercept_ = np.zeros((len(model.classes_),))
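These parameter helpers are what make the scikit-learn model compatible with federated averaging: each round the server combines the clients' `coef_` (and `intercept_`) arrays, weighted by sample counts. Flower's `FedAvg` strategy does this internally; the following self-contained sketch only illustrates the aggregation step on made-up numbers:

```python
import numpy as np

# Two hypothetical client updates: (parameters, number_of_samples).
# The parameter lists mirror get_model_parameters(): [coef_, intercept_].
client_a = ([np.array([[0.2, -0.1]]), np.array([0.5])], 80)
client_b = ([np.array([[0.4, 0.3]]), np.array([-0.1])], 20)


def fedavg(updates):
    """Sample-weighted average of each parameter array, as FedAvg computes it."""
    total = sum(n for _, n in updates)
    return [
        sum(params[i] * n for params, n in updates) / total
        for i in range(len(updates[0][0]))
    ]


aggregated = fedavg([client_a, client_b])
# coef -> 0.8 * [0.2, -0.1] + 0.2 * [0.4, 0.3] = [0.24, -0.02]
print(aggregated)
```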
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression.json b/exareme2/algorithms/flower/mnist_logistic_regression.json
new file mode 100644
index 000000000..4c6f88f9a
--- /dev/null
+++ b/exareme2/algorithms/flower/mnist_logistic_regression.json
@@ -0,0 +1,37 @@
+{
+    "name": "mnist_logistic_regression",
+    "desc": "Statistical method that models the relationship between a dependent binary variable and one or more independent variables by fitting a binary logistic curve to the observed data.",
+    "label": "Logistic Regression on Flower",
+    "enabled": true,
+    "type": "flower",
+    "inputdata": {
+        "y": {
+            "label": "Variable (dependent)",
+            "desc": "A unique nominal variable. The variable is converted to binary by assigning 1 to the positive class and 0 to all other classes.",
+            "types": [
+                "int",
+                "text"
+            ],
+            "stattypes": [
+                "nominal"
+            ],
+            "notblank": true,
+            "multiple": false
+        },
+        "x": {
+            "label": "Covariates (independent)",
+            "desc": "One or more variables. Can be numerical or nominal. For nominal variables dummy encoding is used.",
+            "types": [
+                "real",
+                "int",
+                "text"
+            ],
+            "stattypes": [
+                "numerical",
+                "nominal"
+            ],
+            "notblank": true,
+            "multiple": true
+        }
+    }
+}
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression/X_test.npy b/exareme2/algorithms/flower/mnist_logistic_regression/X_test.npy
new file mode 100644
index 000000000..845a372c0
Binary files /dev/null and b/exareme2/algorithms/flower/mnist_logistic_regression/X_test.npy differ
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression/__init__.py b/exareme2/algorithms/flower/mnist_logistic_regression/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression/client.py b/exareme2/algorithms/flower/mnist_logistic_regression/client.py
new file mode 100644
index 000000000..3e720bfec
--- /dev/null
+++ b/exareme2/algorithms/flower/mnist_logistic_regression/client.py
@@ -0,0 +1,71 @@
+import os
+import warnings
+
+import flwr as fl
+import numpy as np
+from sklearn.linear_model import LogisticRegression
+from sklearn.metrics import log_loss
+
+from exareme2.algorithms.flower.mnist_logistic_regression import utils
+
+if __name__ == "__main__":
+    # Load data from file
+    X, y = utils.load_data()
+
+    # Split the on edge data: 80% train, 20% test
+    X_train, X_test = X[: int(0.8 * len(X))], X[int(0.8 * len(X)) :]
+    y_train, y_test = y[: int(0.8 * len(y))], y[int(0.8 * len(y)) :]
+
+    # Create LogisticRegression Model
+    model = LogisticRegression(
+        penalty="l2",
+        max_iter=1,  # local epoch
+        warm_start=True,  # prevent refreshing weights when fitting
+    )
+
+    # Setting initial parameters, akin to model.compile for keras models
+    utils.set_initial_params(model)
+
+    # Define Flower client
+    class MnistClient(fl.client.NumPyClient):
+        def get_parameters(self, config):
+            # Ensure parameters are returned as a list of NumPy arrays
+            return [
+                param.astype(np.float32) for param in utils.get_model_parameters(model)
+            ]
+
+        def fit(self, parameters, config):
+            try:
+                utils.set_model_params(model, parameters)
+                with warnings.catch_warnings():
+                    warnings.simplefilter("ignore")
+                    model.fit(X_train, y_train)
+                accuracy = model.score(X_test, y_test)
+
+                # Ensure the parameters are extracted and formatted correctly
+                params = [
+                    param.astype(np.float32)
+                    for param in utils.get_model_parameters(model)
+                ]
+                return_data = (params, len(X_train), {"accuracy": accuracy})
+            except Exception as e:
+                print(f"Error during model fitting: {e}")
+                # On error, default to zero-initialized parameters, no training examples, and zero accuracy
+                zero_params = [
+                    np.zeros_like(param) for param in utils.get_model_parameters(model)
+                ]
+                return_data = (zero_params, 0, {"accuracy": 0.0})
+
+            print(f"Returning from fit: {return_data}")
+            return return_data
+
+        def evaluate(self, parameters, config):
+            utils.set_model_params(model, parameters)
+            loss = log_loss(y_test, model.predict_proba(X_test))
+            accuracy = model.score(X_test, y_test)
+            return loss, len(X_test), {"accuracy": accuracy}
+
+    # Start Flower client
+    fl.client.start_client(
+        server_address=os.environ["SERVER_ADDRESS"], client=MnistClient().to_client()
+    )
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression/server.py b/exareme2/algorithms/flower/mnist_logistic_regression/server.py
new file mode 100644
index 000000000..2eda57067
--- /dev/null
+++ b/exareme2/algorithms/flower/mnist_logistic_regression/server.py
@@ -0,0 +1,53 @@
+import os
+from typing import Dict
+
+import flwr as fl
+from sklearn.linear_model import LogisticRegression
+from sklearn.metrics import log_loss
+
+from exareme2.algorithms.flower.flower_data_processing import post_result
+from exareme2.algorithms.flower.mnist_logistic_regression import utils
+
+NUM_OF_ROUNDS = 5
+
+
+def fit_round(server_round: int) -> Dict:
+    """Send round number to client."""
+    return {"server_round": server_round}
+
+
+def get_evaluate_fn(model: LogisticRegression):
+    """Return an evaluation function for server-side evaluation."""
+    # Load data from file
+    X_test, y_test = utils.load_data()
+
+    # The `evaluate` function will be called after every round
+    def evaluate(server_round, parameters: fl.common.NDArrays, config):
+        # Update model with the latest parameters
+        utils.set_model_params(model, parameters)
+        loss = log_loss(y_test, model.predict_proba(X_test))
+        accuracy = model.score(X_test, y_test)
+        if server_round == NUM_OF_ROUNDS:
+            post_result({"accuracy": accuracy})
+            print({"accuracy": accuracy})
+        return loss, {"accuracy": accuracy}
+
+    return evaluate
+
+
+# Start Flower server for five rounds of federated learning
+if __name__ == "__main__":
+    model = LogisticRegression()
+    utils.set_initial_params(model)
+    strategy = fl.server.strategy.FedAvg(
+        min_available_clients=int(os.environ["NUMBER_OF_CLIENTS"]),
+        min_evaluate_clients=int(os.environ["NUMBER_OF_CLIENTS"]),
+        min_fit_clients=int(os.environ["NUMBER_OF_CLIENTS"]),
+        evaluate_fn=get_evaluate_fn(model),
+        on_fit_config_fn=fit_round,
+    )
+    fl.server.start_server(
+        server_address=os.environ["SERVER_ADDRESS"],
+        strategy=strategy,
+        config=fl.server.ServerConfig(num_rounds=NUM_OF_ROUNDS),
+    )
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression/utils.py b/exareme2/algorithms/flower/mnist_logistic_regression/utils.py
new file mode 100644
index 000000000..227e41511
--- /dev/null
+++ b/exareme2/algorithms/flower/mnist_logistic_regression/utils.py
@@ -0,0 +1,56 @@
+from pathlib import Path
+from typing import List
+from typing import Tuple
+from typing import Union
+
+import numpy as np
+from sklearn.linear_model import LogisticRegression
+
+XY = Tuple[np.ndarray, np.ndarray]
+Dataset = Tuple[XY, XY]
+LogRegParams = Union[XY, Tuple[np.ndarray]]
+XYList = List[XY]
+
+
+def load_data():
+    mnist_folder = Path(__file__).parent
+    return np.load(mnist_folder / "X_test.npy"), np.load(mnist_folder / "y_test.npy")
+
+
+def get_model_parameters(model):
+    """Returns the parameters of a sklearn LogisticRegression model"""
+    if model.fit_intercept:
+        params = (model.coef_, model.intercept_)
+    else:
+        params = (model.coef_,)
+    return params
+
+
+def set_model_params(
+    model: LogisticRegression, params: LogRegParams
+) -> LogisticRegression:
+    """Sets the parameters of a sklearn LogisticRegression model"""
+    model.coef_ = params[0]
+    if model.fit_intercept:
+        model.intercept_ = params[1]
+    return model
+
+
+def set_initial_params(model: LogisticRegression):
+    """
+    Sets initial parameters as zeros
+    """
+    n_classes = 10  # MNIST has 10 classes
+    n_features = 784  # Number of features in dataset
+    model.classes_ = np.array([i for i in range(n_classes)])
+
+    model.coef_ = np.zeros((n_classes, n_features))
+    if model.fit_intercept:
+        model.intercept_ = np.zeros((n_classes,))
+
+
+def partition(X: np.ndarray, y: np.ndarray, num_partitions: int) -> XYList:
+    """Split X and y into a number of partitions."""
+    return list(
+        zip(np.array_split(X, num_partitions), np.array_split(y, num_partitions))
+    )
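The `partition` helper is not used by the client/server above, but it hints at how the bundled MNIST arrays could be split into per-client shards for local experiments. A quick sketch with random stand-in data (shapes follow MNIST; assumes the package is importable):

```python
import numpy as np

from exareme2.algorithms.flower.mnist_logistic_regression.utils import partition

# Stand-in for load_data(): 100 samples with 784 features, 10 classes.
X = np.random.rand(100, 784)
y = np.random.randint(0, 10, size=100)

shards = partition(X, y, num_partitions=4)
for i, (X_shard, y_shard) in enumerate(shards):
    print(f"client {i}: {X_shard.shape[0]} samples")  # 25 samples each
```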
diff --git a/exareme2/algorithms/flower/mnist_logistic_regression/y_test.npy b/exareme2/algorithms/flower/mnist_logistic_regression/y_test.npy
new file mode 100644
index 000000000..21a540d6e
Binary files /dev/null and b/exareme2/algorithms/flower/mnist_logistic_regression/y_test.npy differ
diff --git a/exareme2/algorithms/flower/process_manager.py b/exareme2/algorithms/flower/process_manager.py
new file mode 100644
index 000000000..7ea731a01
--- /dev/null
+++ b/exareme2/algorithms/flower/process_manager.py
@@ -0,0 +1,109 @@
+import os
+import subprocess
+from pathlib import Path
+
+import psutil
+
+ALGORITHMS_ROOT = Path(__file__).parent
+
+
+def process_status(proc):
+    """Check the status of a process."""
+    try:
+        if not proc.is_running():
+            return "not running"
+        if proc.status() == psutil.STATUS_ZOMBIE:
+            return "zombie"
+        return "running"
+    except psutil.NoSuchProcess:
+        return "does not exist"
+
+
+def handle_zombie(proc, logger):
+    """Clean up a zombie process."""
+    if proc.status() == psutil.STATUS_ZOMBIE:
+        logger.info(f"Attempting to reap zombie process {proc.pid}")
+        try:
+            os.waitpid(proc.pid, 0)
+            logger.info(f"Zombie process {proc.pid} reaped successfully.")
+        except ChildProcessError:
+            logger.error(
+                f"Zombie process {proc.pid} could not be reaped. It may have been reaped already."
+            )
+        except Exception as e:
+            logger.error(f"Error reaping zombie PID {proc.pid}: {e}")
+
+
+def send_signal(proc, signal, timeout, logger):
+    """Send a signal to the process and wait for it to terminate."""
+    try:
+        (proc.terminate if signal == "TERM" else proc.kill)()
+        proc.wait(timeout=timeout)
+        logger.info(f"Process {proc.pid} terminated with {signal}.")
+        return True
+    except psutil.TimeoutExpired:
+        logger.warning(
+            f"Process {proc.pid} did not terminate after {signal}. Trying again..."
+        )
+        return False
+    except psutil.NoSuchProcess:
+        logger.info(f"Process {proc.pid} no longer exists.")
+        return True
+
+
+def terminate_process(proc, logger, max_attempts=3, wait_time=10):
+    """Attempt to terminate the process with a limited number of retries, handling zombie processes."""
+
+    for attempt in range(max_attempts):
+        status = process_status(proc)
+        if status in ["not running", "does not exist"]:
+            logger.info(f"Process {proc.pid} is already terminated.")
+            return
+        if status == "zombie":
+            return handle_zombie(proc, logger)
+
+        signal = "TERM" if attempt < max_attempts - 1 else "KILL"
+        timeout = wait_time if signal == "TERM" else None
+        if send_signal(proc, signal, timeout, logger):
+            return
+
+    logger.error(f"Failed to terminate PID {proc.pid} after {max_attempts} attempts.")
+
+
+class FlowerProcess:
+    def __init__(self, file, parameters=None, env_vars=None, stdout=None, stderr=None):
+        self.file = file
+        self.parameters = parameters if parameters is not None else []
+        self.env_vars = env_vars if env_vars is not None else {}
+        self.stdout = stdout
+        self.stderr = stderr
+        self.proc = None
+
+    def start(self, logger):
+        if self.proc is not None:
+            logger.error("Process already started!")
+            raise RuntimeError("Process already started!")
+        flower_executable = ALGORITHMS_ROOT / self.file
+        env = {**os.environ, **{k: str(v) for k, v in self.env_vars.items()}}
+        command = ["poetry", "run", "python", str(flower_executable), *self.parameters]
+        logger.info(f"Executing command: {command}")
+        self.proc = subprocess.Popen(
+            command, env=env, stdout=self.stdout, stderr=self.stderr
+        )
+        return self.proc.pid
+
+    @classmethod
+    def kill_process(cls, pid, algorithm_name, logger):
+        """Terminate a process based on the algorithm name, with logging."""
+        try:
+            proc = psutil.Process(pid)
+            command_line = " ".join(proc.cmdline())
+            logger.info(f"Command line for PID {pid}: {command_line}")
+            if algorithm_name.lower() in command_line.lower():
+                terminate_process(proc, logger)
+        except psutil.NoSuchProcess:
+            logger.warning(f"No process found with PID {pid}. It may have already exited.")
+        except psutil.AccessDenied:
+            logger.error(f"Access denied when attempting to terminate PID {pid}.")
+        except Exception as e:
+            logger.error(f"An error occurred while managing PID {pid}: {e}")
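For context on how the worker side is expected to drive this class, here is a hedged sketch that launches a Flower server script and later tears it down. The file path and env values are illustrative; in the real flow they come from the worker's flower tasks:

```python
import logging

from exareme2.algorithms.flower.process_manager import FlowerProcess

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("flower-demo")

# The file path is resolved relative to exareme2/algorithms/flower/.
process = FlowerProcess(
    "logistic_regression/server.py",
    env_vars={"SERVER_ADDRESS": "0.0.0.0:8080", "NUMBER_OF_CLIENTS": 2},
)
pid = process.start(logger)  # spawns `poetry run python .../server.py`

# ... the federated rounds run here ...

# kill_process only terminates the PID if its command line mentions the
# algorithm name, which guards against terminating a reused PID.
FlowerProcess.kill_process(pid, "logistic_regression", logger)
```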
diff --git a/exareme2/controller/README.md b/exareme2/controller/README.md
index a27037b28..0cacc7235 100644
--- a/exareme2/controller/README.md
+++ b/exareme2/controller/README.md
@@ -23,6 +23,7 @@
 LOG_LEVEL=INFO
 FRAMEWORK_LOG_LEVEL=INFO
 DEPLOYMENT_TYPE=LOCAL
 WORKER_LANDSCAPE_AGGREGATOR_UPDATE_INTERVAL=30
+FLOWER_EXECUTION_TIMEOUT=30
 LOCALWORKERS_CONFIG_FILE=/home/user/localworkers_config.json
 ```
diff --git a/exareme2/controller/celery/tasks_handler.py b/exareme2/controller/celery/tasks_handler.py
index 27597dde2..dd1632bf6 100644
--- a/exareme2/controller/celery/tasks_handler.py
+++ b/exareme2/controller/celery/tasks_handler.py
@@ -34,6 +34,10 @@
     "get_data_model_cdes": "exareme2.worker.worker_info.worker_info_api.get_data_model_cdes",
     "get_data_model_attributes": "exareme2.worker.worker_info.worker_info_api.get_data_model_attributes",
     "healthcheck": "exareme2.worker.worker_info.worker_info_api.healthcheck",
+    "start_flower_client": "exareme2.worker.flower.starter.flower_api.start_flower_client",
+    "start_flower_server": "exareme2.worker.flower.starter.flower_api.start_flower_server",
+    "stop_flower_server": "exareme2.worker.flower.cleanup.cleanup_api.stop_flower_server",
+    "stop_flower_client": "exareme2.worker.flower.cleanup.cleanup_api.stop_flower_client",
 }
@@ -291,3 +295,44 @@
             check_db=check_db,
             priority=CELERY_APP_QUEUE_MAX_PRIORITY,
         )
+
+    def start_flower_client(
+        self, request_id, algorithm_name, worker_id
+    ) -> WorkerTaskResult:
+        return self._queue_task(
+            task_signature=TASK_SIGNATURES["start_flower_client"],
+            request_id=request_id,
+            algorithm_name=algorithm_name,
+            worker_id=worker_id,
+        )
+
+    def start_flower_server(
+        self, request_id, algorithm_name, number_of_clients, worker_id
+    ) -> WorkerTaskResult:
+        return self._queue_task(
+            task_signature=TASK_SIGNATURES["start_flower_server"],
+            request_id=request_id,
+            algorithm_name=algorithm_name,
+            number_of_clients=number_of_clients,
+            worker_id=worker_id,
+        )
+
+    def stop_flower_server(
+        self, request_id, pid: int, algorithm_name: str
+    ) -> WorkerTaskResult:
+        return self._queue_task(
+            task_signature=TASK_SIGNATURES["stop_flower_server"],
+            request_id=request_id,
+            pid=pid,
+            algorithm_name=algorithm_name,
+        )
+
+    def stop_flower_client(
+        self, request_id, pid: int, algorithm_name: str
+    ) -> WorkerTaskResult:
+        return self._queue_task(
+            task_signature=TASK_SIGNATURES["stop_flower_client"],
+            request_id=request_id,
+            pid=pid,
+            algorithm_name=algorithm_name,
+        )
diff --git a/exareme2/controller/config.toml b/exareme2/controller/config.toml
index dc4ba3f3c..401f97acc 100644
--- a/exareme2/controller/config.toml
+++ b/exareme2/controller/config.toml
@@ -4,6 +4,7 @@
 framework_log_level = "$FRAMEWORK_LOG_LEVEL"
 deployment_type="$DEPLOYMENT_TYPE"

 worker_landscape_aggregator_update_interval="$WORKER_LANDSCAPE_AGGREGATOR_UPDATE_INTERVAL"
+flower_execution_timeout="$FLOWER_EXECUTION_TIMEOUT"

 [cleanup]
 contextids_cleanup_folder="$CLEANUP_FOLDER"
diff --git a/exareme2/controller/quart/endpoints.py b/exareme2/controller/quart/endpoints.py
index db72aa225..a6a9ffbdf 100644
--- a/exareme2/controller/quart/endpoints.py
+++ b/exareme2/controller/quart/endpoints.py
@@ -1,6 +1,7 @@
 from logging.config import dictConfig

 import pydantic
+from celery.utils.serialization import jsonify
 from quart import Blueprint
 from quart import request

@@ -12,6 +13,7 @@
 from exareme2.controller.services.api.algorithm_spec_dtos import (
     algorithm_specifications_dtos,
 )
+from exareme2.controller.services.flower import get_flower_execution_info
 from exareme2.controller.services.startup import start_background_services

 algorithms = Blueprint("algorithms_endpoint", __name__)
@@ -89,6 +91,19 @@ async def run_algorithm(algorithm_name: str) -> str:
     return result


+@algorithms.route("/flower/input", methods=["GET"])
+async def get_flower_input() -> dict:
+    return get_flower_execution_info().get_inputdata().dict()
+
+
+@algorithms.route("/flower/result", methods=["POST"])
+async def set_flower_result():
+    request_body = await request.json
+    await get_flower_execution_info().set_result(result=request_body)
+
+    return jsonify({"message": "Result set successfully"}), 200
+
+
 def configure_loggers():
     """
     The loggers should be initialized at app startup, otherwise the configs are overwritten.
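These two endpoints give the Flower subprocesses a plain-HTTP contract with the controller: the server and clients fetch their input from `/flower/input`, and the server posts the final metrics to `/flower/result` (compare `get_input()` and `post_result()` in flower_data_processing.py). A sketch of that exchange with `requests`, assuming a controller is running on the address configured above:

```python
import requests

BASE = "http://127.0.0.1:5000"

# What a Flower process does at startup (see get_input()).
inputdata = requests.get(f"{BASE}/flower/input").json()
print(inputdata["data_model"], inputdata["datasets"])

# What the server does after the final round (see post_result()).
response = requests.post(f"{BASE}/flower/result", json={"accuracy": 0.87})
print(response.status_code)  # 200 once the result is registered
```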
diff --git a/exareme2/controller/services/algorithm_execution.py b/exareme2/controller/services/algorithm_execution.py
index 609fcf0a9..653329d24 100644
--- a/exareme2/controller/services/algorithm_execution.py
+++ b/exareme2/controller/services/algorithm_execution.py
@@ -1,3 +1,4 @@
+from exareme2.algorithms.specifications import AlgorithmType
 from exareme2.controller import config as ctrl_config
 from exareme2.controller.services import get_worker_landscape_aggregator
 from exareme2.controller.services.api.algorithm_request_dtos import AlgorithmRequestDTO
@@ -8,6 +9,7 @@
 from exareme2.controller.services.exareme2 import (
     get_controller as get_exareme2_controller,
 )
+from exareme2.controller.services.flower import get_controller as get_flower_controller
 from exareme2.controller.uid_generator import UIDGenerator
@@ -24,7 +26,12 @@ async def execute_algorithm(algo_name: str, request_dto: AlgorithmRequestDTO):
         smpc_enabled=ctrl_config.smpc.enabled,
         smpc_optional=ctrl_config.smpc.optional,
     )
-    algorithm_result = await get_exareme2_controller().exec_algorithm(
+    controller = (
+        get_flower_controller()
+        if request_dto.type == AlgorithmType.FLOWER
+        else get_exareme2_controller()
+    )
+    algorithm_result = await controller.exec_algorithm(
         algorithm_name=algo_name,
         algorithm_request_dto=request_dto,
     )
diff --git a/exareme2/controller/services/api/algorithm_spec_dtos.py b/exareme2/controller/services/api/algorithm_spec_dtos.py
index 9155e2905..998001376 100644
--- a/exareme2/controller/services/api/algorithm_spec_dtos.py
+++ b/exareme2/controller/services/api/algorithm_spec_dtos.py
@@ -6,7 +6,6 @@
 from typing import Dict
 from typing import List
 from typing import Optional
-from typing import Tuple

 from pydantic import BaseModel
diff --git a/exareme2/controller/services/exareme2/controller.py b/exareme2/controller/services/exareme2/controller.py
index cd850093a..93b65dfab 100644
--- a/exareme2/controller/services/exareme2/controller.py
+++ b/exareme2/controller/services/exareme2/controller.py
@@ -516,7 +516,7 @@ def _get_workers_info(self) -> List[WorkerInfo]:
 class ExecutionStrategy(ABC):
     """
     ExecutionStrategy is an interface, that implements a Strategy pattern, allowing to
-    add arbitrary functionalilty before executing the final "Algorithm" logic, without
+    add arbitrary functionality before executing the final "Algorithm" logic, without
     having to alter the Controller.exec_algorithm method. Subclassing and implementing
     the abstract method run defines the desired functionality.
     """
diff --git a/exareme2/controller/services/flower/__init__.py b/exareme2/controller/services/flower/__init__.py
index e69de29bb..e9bc2db76 100644
--- a/exareme2/controller/services/flower/__init__.py
+++ b/exareme2/controller/services/flower/__init__.py
@@ -0,0 +1,29 @@
+from typing import Optional
+
+from exareme2.controller.services.flower.controller import Controller
+from exareme2.controller.services.flower.flower_io_registry import FlowerIORegistry
+
+_flower_execution_info: Optional[FlowerIORegistry] = None
+_controller: Optional[Controller] = None
+
+
+def set_controller(controller: Controller):
+    global _controller
+    _controller = controller
+
+
+def get_controller() -> Controller:
+    global _controller
+    if not _controller:
+        raise ValueError("Controller has not been initialized.")
+    return _controller
+
+
+def set_flower_execution_info(flower_execution_info: FlowerIORegistry):
+    global _flower_execution_info
+    _flower_execution_info = flower_execution_info
+
+
+def get_flower_execution_info() -> FlowerIORegistry:
+    global _flower_execution_info
+    return _flower_execution_info
diff --git a/exareme2/controller/services/flower/controller.py b/exareme2/controller/services/flower/controller.py
new file mode 100644
index 000000000..69160af60
--- /dev/null
+++ b/exareme2/controller/services/flower/controller.py
@@ -0,0 +1,125 @@
+import asyncio
+from typing import List
+
+from exareme2.controller import logger as ctrl_logger
+from exareme2.controller.federation_info_logs import log_experiment_execution
+from exareme2.controller.services.flower.tasks_handler import FlowerTasksHandler
+from exareme2.controller.uid_generator import UIDGenerator
+from exareme2.worker_communication import WorkerInfo
+
+
+# Base Exception class for Worker-related exceptions
+class WorkerException(Exception):
+    pass
+
+
+class WorkerUnresponsiveException(WorkerException):
+    def __init__(self):
+        super().__init__("One of the workers stopped responding")
+
+
+class WorkerTaskTimeoutException(WorkerException):
+    def __init__(self, timeout):
+        super().__init__(
+            f"Task took longer than {timeout} seconds. Increase timeout or try again."
+        )
+
+
+# Controller class
+class Controller:
+    def __init__(
+        self, worker_landscape_aggregator, flower_execution_info, task_timeout
+    ):
+        self.worker_landscape_aggregator = worker_landscape_aggregator
+        self.flower_execution_info = flower_execution_info
+        self.task_timeout = task_timeout
+        self.lock = asyncio.Lock()
+
+    def _create_worker_tasks_handler(self, request_id, worker_info: WorkerInfo):
+        worker_addr = f"{worker_info.ip}:{worker_info.port}"
+        worker_db_addr = f"{worker_info.db_ip}:{worker_info.db_port}"
+        return FlowerTasksHandler(
+            request_id,
+            worker_id=worker_info.id,
+            worker_queue_addr=worker_addr,
+            worker_db_addr=worker_db_addr,
+            tasks_timeout=self.task_timeout,
+        )
+
+    async def exec_algorithm(self, algorithm_name, algorithm_request_dto):
+        async with self.lock:
+            request_id = algorithm_request_dto.request_id
+            context_id = UIDGenerator().get_a_uid()
+            logger = ctrl_logger.get_request_logger(request_id)
+            workers_info = self._get_workers_info_by_dataset(
+                algorithm_request_dto.inputdata.data_model,
+                algorithm_request_dto.inputdata.datasets,
+            )
+            task_handlers = [
+                self._create_worker_tasks_handler(request_id, worker)
+                for worker in workers_info
+            ]
+            server_task_handler = (
+                task_handlers[0]
+                if len(task_handlers) == 1
+                else self._create_global_handler(request_id)
+            )
+            self.flower_execution_info.set_inputdata(
+                inputdata=algorithm_request_dto.inputdata
+            )
+            server_pid = None
+            clients_pids = {}
+
+            try:
+                server_pid = server_task_handler.start_flower_server(
+                    algorithm_name, len(task_handlers)
+                )
+                clients_pids = {
+                    handler.start_flower_client(algorithm_name): handler
+                    for handler in task_handlers
+                }
+
+                log_experiment_execution(
+                    logger,
+                    request_id,
+                    context_id,
+                    algorithm_name,
+                    algorithm_request_dto.inputdata.datasets,
+                    algorithm_request_dto.parameters,
+                    [info.id for info in workers_info],
+                )
+                result = await self.flower_execution_info.get_result_with_timeout()
+
+                logger.info(f"Finished execution -> {algorithm_name} with {request_id}")
+                return result
+
+            except asyncio.TimeoutError:
+                raise WorkerTaskTimeoutException(self.task_timeout)
+            finally:
+                await self._cleanup(
+                    algorithm_name, server_task_handler, server_pid, clients_pids
+                )
+
+    def _create_global_handler(self, request_id):
+        global_worker = self.worker_landscape_aggregator.get_global_worker()
+        return self._create_worker_tasks_handler(request_id, global_worker)
+
+    async def _cleanup(
+        self, algorithm_name, server_task_handler, server_pid, clients_pids
+    ):
+        await self.flower_execution_info.reset()
+        server_task_handler.stop_flower_server(server_pid, algorithm_name)
+        for pid, handler in clients_pids.items():
+            handler.stop_flower_client(pid, algorithm_name)
+
+    def _get_workers_info_by_dataset(self, data_model, datasets) -> List[WorkerInfo]:
+        """Retrieves worker information for those handling the specified datasets."""
+        worker_ids = (
+            self.worker_landscape_aggregator.get_worker_ids_with_any_of_datasets(
+                data_model, datasets
+            )
+        )
+        return [
+            self.worker_landscape_aggregator.get_worker_info(worker_id)
+            for worker_id in worker_ids
+        ]
diff --git a/exareme2/controller/services/flower/flower_io_registry.py b/exareme2/controller/services/flower/flower_io_registry.py
new file mode 100644
index 000000000..dd37da98c
--- /dev/null
+++ b/exareme2/controller/services/flower/flower_io_registry.py
@@ -0,0 +1,88 @@
+import asyncio
+from enum import Enum
+from enum import unique
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+from exareme2.controller.services.api.algorithm_request_dtos import (
+    AlgorithmInputDataDTO,
+)
+
+
+@unique
+class Status(str, Enum):
+    SUCCESS = "success"
+    RUNNING = "running"
+    FAILURE = "failure"
+
+
+class Result:
+    def __init__(self, content: Dict[str, Any], status: Status):
+        self.content = content
+        self.status = status
+
+    def __repr__(self):
+        return f"Result(status={self.status}, content={self.content})"
+
+
+class FlowerIORegistry:
+    def __init__(self, timeout, logger):
+        self._inputdata: Optional[AlgorithmInputDataDTO] = None
+        self._result: Optional[Result] = None
+        self.result_ready: Optional[asyncio.Event] = None
+        self._logger = logger
+        self._reset_sync()
+        self._timeout = timeout
+
+    def _reset_sync(self):
+        """Synchronously resets the algorithm execution info to initial state."""
+        self._inputdata = AlgorithmInputDataDTO(data_model="", datasets=[])
+        self._result = Result(content={}, status=Status.RUNNING)
+        self.result_ready = asyncio.Event()
+        self._logger.debug("Algorithm reset: input data cleared, status set to RUNNING")
+
+    async def reset(self):
+        """Asynchronously resets the algorithm execution info to initial state."""
+        self._reset_sync()
+        self._logger.debug("Asynchronous reset complete")
+
+    async def set_result(self, result: Dict[str, Any]):
+        """Sets the execution result and updates the status based on the presence of an error."""
+        status = Status.FAILURE if "error" in result else Status.SUCCESS
+        self._result = Result(content=result, status=status)
+        self._logger.debug(f"Result set with status: {status}, content: {result}")
+        self.result_ready.set()
+
+    async def get_result(self) -> Dict[str, Any]:
+        await self.result_ready.wait()
+        self._logger.debug(f"Result retrieved: {self._result}")
+        return self._result.content
+
+    async def get_result_with_timeout(self) -> Dict[str, Any]:
+        try:
+            await asyncio.wait_for(self.get_result(), self._timeout)
+        except asyncio.TimeoutError:
+            error = f"Failed to get result: operation timed out after {self._timeout} seconds"
+            self._logger.error(error)
+            self._result = Result(content={"error": error}, status=Status.FAILURE)
+        self._logger.debug(f"Result with timeout: {self._result}")
+        return self._result.content
+
+    def get_status(self) -> Status:
+        """Returns the current status of the execution."""
+        status = self._result.status if self._result else Status.RUNNING
+        self._logger.debug(f"Status retrieved: {status}")
+        return status
+
+    def set_inputdata(self, inputdata: AlgorithmInputDataDTO):
+        """Sets new input data for the algorithm and resets status and result."""
+        self._inputdata = inputdata
+        self._result = Result(content={}, status=Status.RUNNING)
+        self.result_ready.clear()
+        self._logger.debug(f"Input data updated: {inputdata}, status reset to RUNNING")
+
+    def get_inputdata(self) -> AlgorithmInputDataDTO:
+        """Returns the current input data."""
+        self._logger.debug(f"Input data retrieved: {self._inputdata}")
+        return self._inputdata
diff --git a/exareme2/controller/services/flower/tasks_handler.py b/exareme2/controller/services/flower/tasks_handler.py
index e69de29bb..b8506a875 100644
--- a/exareme2/controller/services/flower/tasks_handler.py
+++ b/exareme2/controller/services/flower/tasks_handler.py
@@ -0,0 +1,50 @@
+from exareme2.controller import logger as ctrl_logger
+from exareme2.controller.celery.tasks_handler import WorkerTasksHandler
+
+
+class FlowerTasksHandler:
+    def __init__(
+        self,
+        request_id: str,
+        worker_id: str,
+        worker_queue_addr: str,
+        worker_db_addr: str,
+        tasks_timeout: int,
+    ):
self._request_id = request_id + self._worker_id = worker_id + self._worker_queue_addr = worker_queue_addr + self._db_address = worker_db_addr + self._tasks_timeout = tasks_timeout + self._logger = ctrl_logger.get_request_logger(request_id=request_id) + self._worker_tasks_handler = WorkerTasksHandler( + self._worker_queue_addr, self._logger + ) + + @property + def worker_id(self) -> str: + return self._worker_id + + @property + def worker_data_address(self) -> str: + return self._db_address + + def start_flower_client(self, algorithm_name) -> int: + return self._worker_tasks_handler.start_flower_client( + self._request_id, algorithm_name, self._worker_id + ).get(timeout=self._tasks_timeout) + + def start_flower_server(self, algorithm_name: str, number_of_clients: int) -> int: + return self._worker_tasks_handler.start_flower_server( + self._request_id, algorithm_name, number_of_clients, self._worker_id + ).get(timeout=self._tasks_timeout) + + def stop_flower_server(self, pid: int, algorithm_name: str): + self._worker_tasks_handler.stop_flower_server( + self._request_id, pid, algorithm_name + ).get(timeout=self._tasks_timeout) + + def stop_flower_client(self, pid: int, algorithm_name: str): + self._worker_tasks_handler.stop_flower_client( + self._request_id, pid, algorithm_name + ).get(timeout=self._tasks_timeout) diff --git a/exareme2/controller/services/startup.py b/exareme2/controller/services/startup.py index 79ce17841..b927a7771 100644 --- a/exareme2/controller/services/startup.py +++ b/exareme2/controller/services/startup.py @@ -1,6 +1,5 @@ from exareme2.controller import config as ctrl_config from exareme2.controller import logger as ctrl_logger -from exareme2.controller.services import WorkerLandscapeAggregator from exareme2.controller.services import set_worker_landscape_aggregator from exareme2.controller.services.exareme2 import set_cleaner from exareme2.controller.services.exareme2 import ( @@ -11,6 +10,15 @@ Controller as Exareme2Controller, ) from exareme2.controller.services.exareme2.execution_engine import SMPCParams +from exareme2.controller.services.flower import set_controller as set_flower_controller +from exareme2.controller.services.flower import set_flower_execution_info +from exareme2.controller.services.flower.controller import ( + Controller as FlowerController, +) +from exareme2.controller.services.flower.flower_io_registry import FlowerIORegistry +from exareme2.controller.services.worker_landscape_aggregator.worker_landscape_aggregator import ( + WorkerLandscapeAggregator, +) from exareme2.smpc_cluster_communication import DifferentialPrivacyParams @@ -56,3 +64,16 @@ def start_background_services(): ) controller.start_cleanup_loop() set_exareme2_controller(controller) + + flower_execution_info = FlowerIORegistry( + ctrl_config.flower_execution_timeout, + ctrl_logger.get_background_service_logger(), + ) + set_flower_execution_info(flower_execution_info) + + controller = FlowerController( + flower_execution_info=flower_execution_info, + worker_landscape_aggregator=worker_landscape_aggregator, + task_timeout=ctrl_config.rabbitmq.celery_tasks_timeout, + ) + set_flower_controller(controller) diff --git a/exareme2/worker/flower/cleanup/__init__.py b/exareme2/worker/flower/cleanup/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/exareme2/worker/flower/cleanup/cleanup_api.py b/exareme2/worker/flower/cleanup/cleanup_api.py new file mode 100644 index 000000000..5e19813a4 --- /dev/null +++ b/exareme2/worker/flower/cleanup/cleanup_api.py @@ -0,0 +1,13 
@@ +from celery import shared_task + +from exareme2.worker.flower.cleanup import cleanup_service + + +@shared_task +def stop_flower_server(request_id: str, pid: int, algorithm_name: str): + cleanup_service.stop_flower_process(request_id, pid, algorithm_name) + + +@shared_task +def stop_flower_client(request_id: str, pid: int, algorithm_name: str): + cleanup_service.stop_flower_process(request_id, pid, algorithm_name) diff --git a/exareme2/worker/flower/cleanup/cleanup_service.py b/exareme2/worker/flower/cleanup/cleanup_service.py new file mode 100644 index 000000000..15380fcc6 --- /dev/null +++ b/exareme2/worker/flower/cleanup/cleanup_service.py @@ -0,0 +1,9 @@ +from exareme2.algorithms.flower.process_manager import FlowerProcess +from exareme2.worker.utils.logger import get_logger +from exareme2.worker.utils.logger import initialise_logger + + +@initialise_logger +def stop_flower_process(request_id: str, pid: int, algorithm_name: str): + logger = get_logger() + FlowerProcess.kill_process(pid, algorithm_name, logger) diff --git a/exareme2/worker/flower/starter/__init__.py b/exareme2/worker/flower/starter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/exareme2/worker/flower/starter/flower_api.py b/exareme2/worker/flower/starter/flower_api.py new file mode 100644 index 000000000..1b132dbab --- /dev/null +++ b/exareme2/worker/flower/starter/flower_api.py @@ -0,0 +1,17 @@ +from celery import shared_task + +from exareme2.worker.flower.starter import flower_service + + +@shared_task +def start_flower_client(request_id: str, algorithm_name: str, worker_id: str) -> int: + return flower_service.start_flower_client(request_id, algorithm_name, worker_id) + + +@shared_task +def start_flower_server( + request_id: str, algorithm_name: str, number_of_clients: int, worker_id: str +) -> int: + return flower_service.start_flower_server( + request_id, algorithm_name, number_of_clients, worker_id + ) diff --git a/exareme2/worker/flower/starter/flower_service.py b/exareme2/worker/flower/starter/flower_service.py new file mode 100644 index 000000000..bdd243a9b --- /dev/null +++ b/exareme2/worker/flower/starter/flower_service.py @@ -0,0 +1,52 @@ +from exareme2 import ALGORITHM_FOLDERS +from exareme2.algorithms.flower.process_manager import FlowerProcess +from exareme2.worker import config as worker_config +from exareme2.worker.utils.logger import get_logger +from exareme2.worker.utils.logger import initialise_logger + +# Dictionary to keep track of running processes +running_processes = {} +SERVER_ADDRESS = "0.0.0.0:8080" + + +@initialise_logger +def start_flower_client(request_id: str, algorithm_name: str, worker_id: str) -> int: + env_vars = { + "MONETDB_IP": worker_config.monetdb.ip, + "MONETDB_PORT": worker_config.monetdb.port, + "MONETDB_USERNAME": worker_config.monetdb.local_username, + "MONETDB_PASSWORD": worker_config.monetdb.local_password, + "MONETDB_DB": worker_config.monetdb.database, + "SERVER_ADDRESS": SERVER_ADDRESS, + } + with open(f"/tmp/exareme2/{worker_id}.out", "a") as f: + process = FlowerProcess( + f"{algorithm_name}/client.py", env_vars=env_vars, stderr=f, stdout=f + ) + running_processes[request_id] = process + logger = get_logger() + + logger.info("Starting client.py") + pid = process.start(logger) + logger.info(f"Started client.py process id: {pid}") + return pid + + +@initialise_logger +def start_flower_server( + request_id: str, algorithm_name: str, number_of_clients: int, worker_id: str +) -> int: + env_vars = { + "SERVER_ADDRESS":
SERVER_ADDRESS, + "NUMBER_OF_CLIENTS": number_of_clients, + } + with open(f"/tmp/exareme2/{worker_id}.out", "a") as f: + process = FlowerProcess( + f"{algorithm_name}/server.py", env_vars=env_vars, stderr=f, stdout=f + ) + running_processes[request_id] = process + logger = get_logger() + logger.info("Starting server.py") + pid = process.start(logger) + logger.info(f"Started server.py process id: {pid}") + return pid diff --git a/exareme2/worker/utils/celery_app.py b/exareme2/worker/utils/celery_app.py index 0e102f2ce..80044af19 100644 --- a/exareme2/worker/utils/celery_app.py +++ b/exareme2/worker/utils/celery_app.py @@ -29,6 +29,8 @@ "exareme2.worker.exareme2.udfs.udfs_api", "exareme2.worker.exareme2.smpc.smpc_api", "exareme2.worker.exareme2.cleanup.cleanup_api", + "exareme2.worker.flower.starter.flower_api", + "exareme2.worker.flower.cleanup.cleanup_api", ], ) worker_logger.info("Celery app created.") diff --git a/kubernetes/templates/exareme2-controller.yaml b/kubernetes/templates/exareme2-controller.yaml index 4005d5793..bafe5ae7e 100644 --- a/kubernetes/templates/exareme2-controller.yaml +++ b/kubernetes/templates/exareme2-controller.yaml @@ -37,6 +37,8 @@ spec: value: "KUBERNETES" - name: WORKER_LANDSCAPE_AGGREGATOR_UPDATE_INTERVAL value: {{ quote .Values.controller.worker_landscape_aggregator_update_interval }} + - name: FLOWER_EXECUTION_TIMEOUT + value: {{ quote .Values.controller.flower_execution_timeout }} - name: WORKERS_CLEANUP_INTERVAL value: {{ quote .Values.controller.workers_cleanup_interval }} - name: WORKERS_CLEANUP_CONTEXTID_RELEASE_TIMELIMIT diff --git a/kubernetes/values.yaml b/kubernetes/values.yaml index 790dcc1d4..e5d87b2d3 100644 --- a/kubernetes/values.yaml +++ b/kubernetes/values.yaml @@ -19,6 +19,7 @@ db: controller: worker_landscape_aggregator_update_interval: 30 + flower_execution_timeout: 30 celery_tasks_timeout: 300 workers_cleanup_interval: 60 cleanup_file_folder: /opt/cleanup diff --git a/poetry.lock b/poetry.lock index eabefac96..c934a312b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.2.1" description = "File support for asyncio." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -16,7 +15,6 @@ files = [ name = "amqp" version = "5.1.1" description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -31,7 +29,6 @@ vine = ">=5.0.0" name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -43,7 +40,6 @@ files = [ name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -61,7 +57,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astor" version = "0.8.1" description = "Read/rewrite/write Python ASTs" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ @@ -73,7 +68,6 @@ files = [ name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -90,7 +84,6 @@ wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} name = "asttokens" version = "2.4.0" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -108,7 +101,6 @@ test = ["astroid", "pytest"] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -127,7 +119,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -139,7 +130,6 @@ files = [ name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -171,7 +161,6 @@ tzdata = ["tzdata"] name = "billiard" version = "4.1.0" description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -183,7 +172,6 @@ files = [ name = "black" version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -230,7 +218,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "blinker" version = "1.5" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -242,7 +229,6 @@ files = [ name = "celery" version = "5.3.4" description = "Distributed Task Queue." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -299,7 +285,6 @@ zstd = ["zstandard (==0.21.0)"] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -307,11 +292,74 @@ files = [ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -323,7 +371,6 @@ files = [ name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -408,7 +455,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -423,7 +469,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -438,7 +483,6 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -category = "main" optional = false python-versions = "*" files = [ @@ -456,7 +500,6 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -475,7 +518,6 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -487,7 +529,6 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -559,11 +600,55 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "cryptography" +version = "41.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -575,7 +660,6 @@ files = [ name = "devtools" version = "0.11.0" description = "Python's missing debug print command, and more." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -592,24 +676,23 @@ pygments = ["pygments (>=2.2.0)"] [[package]] name = "dill" -version = "0.3.7" +version = "0.3.8" description = "serialize all of Python" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "distlib" version = "0.3.7" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -621,7 +704,6 @@ files = [ name = "dnspython" version = "2.3.0" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -642,7 +724,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -664,7 +745,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "envtoml" version = "0.1.2" description = "A simple way of using environment variables in TOML configs (via interpolation)" -category = "main" optional = false python-versions = ">=3.5,<4.0" files = [ @@ -679,7 +759,6 @@ toml = ">=0.10.0,<0.11.0" name = "eventlet" version = "0.33.3" description = "Highly concurrent networking library" -category = "main" optional = false python-versions = "*" files = [ @@ -696,7 +775,6 @@ six = ">=1.10.0" name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -711,7 +789,6 @@ test = ["pytest (>=6)"] name = "execnet" version = "2.0.2" description = "execnet: rapid multi-Python deployment" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -726,7 +803,6 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -741,7 +817,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "fancycompleter" version = "0.9.1" description = "colorful TAB completion for Python prompt" -category = "dev" optional = false python-versions = "*" files = [ @@ -757,7 +832,6 @@ pyrepl = ">=0.8.2" name = "filelock" version = "3.12.4" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -770,11 +844,33 @@ docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] typing = ["typing-extensions (>=4.7.1)"] +[[package]] +name = "flwr" +version = "1.7.0" +description = "Flower: A Friendly Federated Learning Framework" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "flwr-1.7.0-py3-none-any.whl", hash = "sha256:5240ab636c33bc94f7d9d46b7a24a6359a5588e2d9ae1c6defac7a0db2bc28eb"}, + {file = "flwr-1.7.0.tar.gz", hash = "sha256:8c2c63068038fd7bd2f704079a7710195c500408d010d291662e6ca018936d21"}, +] + +[package.dependencies] +cryptography = ">=41.0.2,<42.0.0" +grpcio = ">=1.60.0,<2.0.0" +iterators = ">=0.0.2,<0.0.3" +numpy = ">=1.21.0,<2.0.0" +protobuf = ">=4.25.2,<5.0.0" +pycryptodome = ">=3.18.0,<4.0.0" + +[package.extras] +rest = ["requests (>=2.31.0,<3.0.0)", "starlette (>=0.31.0,<0.32.0)", "uvicorn[standard] (>=0.23.0,<0.24.0)"] +simulation = ["pydantic (<2.0.0)", "ray (==2.6.3)"] + [[package]] name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -789,7 +885,6 @@ python-dateutil = ">=2.7" name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -798,6 +893,7 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = 
"greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -806,6 +902,7 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -835,6 +932,7 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -843,6 +941,7 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = 
"greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -859,11 +958,76 @@ files = [ docs = ["Sphinx", "docutils (<0.18)"] test = ["objgraph", "psutil"] +[[package]] +name = "grpcio" +version = "1.62.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = 
"grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, 
+ {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.1)"] + [[package]] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -875,7 +1039,6 @@ files = [ name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -891,7 +1054,6 @@ hyperframe = ">=6.0,<7" name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -903,7 +1065,6 @@ files = [ name = "hypercorn" version = "0.14.4" description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -928,7 +1089,6 @@ uvloop = ["uvloop"] name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -940,7 +1100,6 @@ files = [ name = "hypothesis" version = "6.81.2" description = "A library for property-based testing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -973,7 +1132,6 @@ zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.3)"] name = "identify" version = "2.5.29" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -988,7 +1146,6 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1000,7 +1157,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1020,7 +1176,6 @@ testing = 
["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1032,7 +1187,6 @@ files = [ name = "invoke" version = "2.2.0" description = "Pythonic task execution" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1044,7 +1198,6 @@ files = [ name = "ipython" version = "8.12.2" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1084,7 +1237,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1098,11 +1250,21 @@ pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib" plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] +[[package]] +name = "iterators" +version = "0.0.2" +description = "Iterator utility classes and functions" +optional = false +python-versions = ">=3.6" +files = [ + {file = "iterators-0.0.2-py3-none-any.whl", hash = "sha256:ac2a9d8af1dd9eed051ccab4a1905a1343d66bbc9f451567d94f6e2744f30fce"}, + {file = "iterators-0.0.2.tar.gz", hash = "sha256:4f6a5b39c3c724edd5c7231a589d463ad50357cdc35494a3c71730795b78eb50"}, +] + [[package]] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1114,7 +1276,6 @@ files = [ name = "jedi" version = "0.19.0" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1134,7 +1295,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1152,7 +1312,6 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.3.2" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1164,7 +1323,6 @@ files = [ name = "kombu" version = "5.3.2" description = "Messaging library for Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1199,7 +1357,6 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1245,7 +1402,6 @@ files = [ name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1315,7 +1471,6 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1330,7 +1485,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1342,7 +1496,6 @@ files = [ name = "mipdb" version = "2.4.7" description = "" -category = "dev" optional = false python-versions = ">=3.8,<3.9" files = [ @@ -1363,7 +1516,6 @@ toml = ">=0.10.2,<0.11.0" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1375,7 +1527,6 @@ files = [ name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -1390,7 +1541,6 @@ setuptools = "*" name = "numpy" version = "1.24.1" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1428,7 +1578,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1440,7 +1589,6 @@ files = [ name = "pandas" version = "1.5.2" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1485,7 +1633,6 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] name = "pandera" version = "0.13.4" description = "A light-weight and flexible data validation and testing tool for statistical data objects." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1519,7 +1666,6 @@ strategies = ["hypothesis (>=5.41.1)"] name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1535,7 +1681,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1547,7 +1692,6 @@ files = [ name = "patsy" version = "0.5.3" description = "A Python package for describing statistical models and for building design matrices." -category = "main" optional = false python-versions = "*" files = [ @@ -1566,7 +1710,6 @@ test = ["pytest", "pytest-cov", "scipy"] name = "pdbpp" version = "0.10.3" description = "pdb++, a drop-in replacement for pdb" -category = "dev" optional = false python-versions = "*" files = [ @@ -1587,7 +1730,6 @@ testing = ["funcsigs", "pytest"] name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -1602,7 +1744,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -1614,7 +1755,6 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1630,7 +1770,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1646,7 +1785,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pre-commit" version = "3.3.3" description = "A framework for managing and maintaining multi-language pre-commit hooks." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1665,7 +1803,6 @@ virtualenv = ">=20.10.0" name = "priority" version = "2.0.0" description = "A pure-Python implementation of the HTTP/2 priority tree" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1677,7 +1814,6 @@ files = [ name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -1688,28 +1824,49 @@ files = [ [package.dependencies] wcwidth = "*" +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -1719,7 +1876,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -1731,7 +1887,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -1742,11 +1897,62 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pycryptodome" +version = "3.20.0" +description = "Cryptographic library for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = 
"pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, +] + [[package]] name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1799,7 +2005,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1814,7 +2019,6 @@ plugins = ["importlib-metadata"] name = "pylint" version = "2.17.6" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -1841,7 +2045,6 @@ testutils = ["gitpython (>3)"] name = "pymonetdb" version = "1.6.3" description = "Native MonetDB client Python API" -category = "main" optional = false python-versions = "*" files = [ @@ -1857,7 +2060,6 @@ test = ["mypy", "pycodestyle", "pytest", "types-setuptools"] name = "pyreadline" version = "2.1" description = "A python implmementation of GNU readline." -category = "dev" optional = false python-versions = "*" files = [ @@ -1868,7 +2070,6 @@ files = [ name = "pyrepl" version = "0.9.0" description = "A library for building flexible command line interfaces" -category = "dev" optional = false python-versions = "*" files = [ @@ -1879,7 +2080,6 @@ files = [ name = "pytest" version = "7.4.2" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1902,7 +2102,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-assume" version = "2.4.3" description = "A pytest plugin that allows multiple failures per test" -category = "dev" optional = false python-versions = "*" files = [ @@ -1918,7 +2117,6 @@ six = "*" name = "pytest-asyncio" version = "0.21.1" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1937,7 +2135,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1956,7 +2153,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1974,7 +2170,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-rerunfailures" version = "12.0" description = "pytest plugin to re-run tests to eliminate flaky failures" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1990,7 +2185,6 @@ pytest = ">=6.2" name = "pytest-subtests" version = "0.11.0" description = "unittest subTest() support and subtests fixture" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2006,7 +2200,6 @@ pytest = ">=7.0" name = "pytest-xdist" version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2027,7 +2220,6 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2042,7 +2234,6 @@ six = ">=1.5" name = "pytoolconfig" version = "1.2.5" description = "Python tool configuration" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2065,7 +2256,6 @@ validation = ["pydantic (>=1.7.4)"] name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2077,7 +2267,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -2101,7 +2290,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2123,6 +2311,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2161,7 +2350,6 @@ files = [ name = "quart" version = "0.18.4" description = "A Python ASGI web microframework with the same API as Flask" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2188,7 +2376,6 @@ dotenv = ["python-dotenv"] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2210,7 +2397,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rope" version = "1.9.0" description = "a python refactoring library..." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2230,7 +2416,6 @@ release = ["pip-tools (>=6.12.1)", "toml (>=0.10.2)", "twine (>=4.0.2)"] name = "scikit-learn" version = "1.2.0" description = "A set of python modules for machine learning and data mining" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2273,7 +2458,6 @@ tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy ( name = "scipy" version = "1.10.0" description = "Fundamental algorithms for scientific computing in Python" -category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -2312,7 +2496,6 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2329,7 +2512,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2341,7 +2523,6 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "dev" optional = false python-versions = "*" files = [ @@ -2353,7 +2534,6 @@ files = [ name = "sqlalchemy" version = "1.3.24" description = "Database Abstraction Library" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2409,7 +2589,6 @@ pymysql = ["pymysql", "pymysql (<1)"] name = "sqlalchemy-monetdb" version = "1.0.0" description = "SQLAlchemy dialect for MonetDB" -category = "dev" optional = false python-versions = "*" files = [ @@ -2427,7 +2606,6 @@ lite = ["monetdblite", "numpy"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -2447,7 +2625,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "statsmodels" version = "0.13.2" description = "Statistical computations and models for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2492,7 +2669,6 @@ docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "n name = "termcolor" version = "2.3.0" description = "ANSI color formatting for output in terminal" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2507,7 +2683,6 @@ tests = ["pytest", "pytest-cov"] name = "threadpoolctl" version = "3.2.0" description = "threadpoolctl" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2519,7 +2694,6 @@ files = [ name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2531,7 +2705,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2543,7 +2716,6 @@ files = [ name = "tomlkit" 
version = "0.12.1" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2555,7 +2727,6 @@ files = [ name = "tqdm" version = "4.65.2" description = "Fast, Extensible Progress Meter" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2576,7 +2747,6 @@ telegram = ["requests"] name = "traitlets" version = "5.10.1" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2592,7 +2762,6 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0, name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2604,7 +2773,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "dev" optional = false python-versions = "*" files = [ @@ -2620,7 +2788,6 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -2632,7 +2799,6 @@ files = [ name = "urllib3" version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2650,7 +2816,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "vine" version = "5.0.0" description = "Promises, promises, promises." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2662,7 +2827,6 @@ files = [ name = "virtualenv" version = "20.24.5" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2683,7 +2847,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -2695,7 +2858,6 @@ files = [ name = "websocket-client" version = "1.6.3" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2712,7 +2874,6 @@ test = ["websockets"] name = "werkzeug" version = "2.3.7" description = "The comprehensive WSGI web application library." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2730,7 +2891,6 @@ watchdog = ["watchdog (>=2.3)"] name = "wmctrl" version = "0.5" description = "A tool to programmatically control windows inside X" -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -2748,7 +2908,6 @@ test = ["pytest"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -2833,7 +2992,6 @@ files = [ name = "wsproto" version = "1.2.0" description = "WebSockets state-machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2848,7 +3006,6 @@ h11 = ">=0.9.0,<1" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2863,4 +3020,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "~3.8" -content-hash = "aec9e3f0d7595f43c816b0a0d1689f82f3daf3678958b6f8c665e03e945332fc" +content-hash = "a2bea930cc10c9936a101beb532743be9caf8d7d253f3701eb113186da5bd572" diff --git a/pyproject.toml b/pyproject.toml index d22ccf6a5..ffc92cc04 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,8 @@ envtoml = "~0.1" requests = "~2.31" eventlet = "~0.33" patsy = "~0.5.3" +flwr = "1.7.0" +psutil = "^5.9.8" [tool.poetry.dev-dependencies] pytest = "~7.4" @@ -32,8 +34,8 @@ pytest-assume="~2.4" ipython = "~8.12" pdbpp = "~0.10" invoke = "~2.2" -black = "23.7.0" # Must be upgraded together with github actions -isort = "5.12.0" # Must be upgraded together with github actions +black = "23.7.0" # Must be upgraded together with github actions and pre-commit configuration +isort = "5.12.0" # Must be upgraded together with github actions and pre-commit configuration pre-commit = "~3.3" termcolor = "~2.3" coverage = {extras = ["toml"], version = "~7.2"} diff --git a/tasks.py b/tasks.py index ddc299f92..520e1f47e 100644 --- a/tasks.py +++ b/tasks.py @@ -201,6 +201,9 @@ def create_configs(c): controller_config[ "worker_landscape_aggregator_update_interval" ] = deployment_config["worker_landscape_aggregator_update_interval"] + controller_config["flower_execution_timeout"] = deployment_config[ + "flower_execution_timeout" + ] controller_config["rabbitmq"]["celery_tasks_timeout"] = deployment_config[ "celery_tasks_timeout" ] diff --git a/tests/algorithm_validation_tests/five_node_deployment_template.toml b/tests/algorithm_validation_tests/five_node_deployment_template.toml index b32045f1f..ad3e6af56 100644 --- a/tests/algorithm_validation_tests/five_node_deployment_template.toml +++ b/tests/algorithm_validation_tests/five_node_deployment_template.toml @@ -10,6 +10,7 @@ monetdb_memory_limit = 2048 # MB algorithm_folders = "./exareme2/algorithms/exareme2,./exareme2/algorithms/flower,./tests/algorithms" worker_landscape_aggregator_update_interval = 30 +flower_execution_timeout = 30 celery_tasks_timeout = 120 celery_cleanup_task_timeout=2 celery_run_udf_task_timeout = 300 diff --git a/tests/algorithm_validation_tests/flower/conftest.py b/tests/algorithm_validation_tests/flower/conftest.py new file mode 100644 index 000000000..023b50f80 --- /dev/null +++ b/tests/algorithm_validation_tests/flower/conftest.py @@ -0,0 +1,20 @@ +import pytest + +from tests.algorithm_validation_tests.flower.helpers import algorithm_request +from tests.algorithm_validation_tests.flower.helpers import parse_response + + +@pytest.fixture(scope="class") +def get_algorithm_result(): + cache = {} + + def _get_algorithm_result(algname, test_input): + test_case_num = test_input["test_case_num"] + key = (algname, test_case_num) + if key not in cache: + response = algorithm_request(algname, test_input) + result = parse_response(response) + 
cache[key] = result + return cache[key] + + return _get_algorithm_result diff --git a/tests/algorithm_validation_tests/flower/helpers.py b/tests/algorithm_validation_tests/flower/helpers.py new file mode 100644 index 000000000..93d209d53 --- /dev/null +++ b/tests/algorithm_validation_tests/flower/helpers.py @@ -0,0 +1,65 @@ +import functools +import json + +import numpy as np +import pytest +import requests + + +def algorithm_request(algorithm: str, input: dict): + url = f"http://127.0.0.1:5000/algorithms/{algorithm}" + headers = {"Content-type": "application/json", "Accept": "text/plain"} + input["type"] = "flower" + response = requests.post(url, data=json.dumps(input), headers=headers) + return response + + +def parse_response(response) -> dict: + if response.status_code != 200: + msg = f"Unexpected response status: '{response.status_code}'. " + msg += f"Response message: '{response.content}'" + raise ValueError(msg) + try: + result = json.loads(response.content) + except json.decoder.JSONDecodeError: + raise ValueError(f"The result is not valid json:\n{response.content}") from None + return result + + +def get_test_params(expected_file, slc=None, skip_indices=None, skip_reason=None): + """ + Gets parameters for algorithm validation tests from the expected file + + Can get the whole list present in the expected file or a given slice. Can + also skip some tests based on their indices. + + Parameters + ---------- + expected_file : pathlib.Path + File in json format containing a list of test cases, where a test case + is a pair of input/output for a given algorithm + slc : slice | None + If not None it gets only the given slice + skip_indices : list[int] | None + Indices of tests to skip + skip_reason : str | None + Reason for skipping tests, combine with previous parameter + """ + with expected_file.open() as f: + params = json.load(f)["test_cases"] + slc = slc or slice(len(params)) + params = [(p["input"], p["output"]) for p in params[slc]] + + def skip(*param): + return pytest.param(*param, marks=pytest.mark.skip(reason=skip_reason)) + + if skip_indices: + params = [skip(*p) if i in skip_indices else p for i, p in enumerate(params)] + return params + + +assert_allclose = functools.partial( + np.testing.assert_allclose, + rtol=1e-6, + atol=1e-9, +) diff --git a/tests/algorithm_validation_tests/flower/test_logistic_regression.py b/tests/algorithm_validation_tests/flower/test_logistic_regression.py new file mode 100644 index 000000000..5c4cc744d --- /dev/null +++ b/tests/algorithm_validation_tests/flower/test_logistic_regression.py @@ -0,0 +1,28 @@ +def test_logistic_regression(get_algorithm_result): + input = { + "inputdata": { + "y": ["gender"], + "x": ["lefthippocampus"], + "data_model": "dementia:0.1", + "datasets": [ + "ppmi0", + "ppmi1", + "ppmi2", + "ppmi3", + "ppmi5", + "ppmi6", + "edsd6", + "ppmi7", + "ppmi8", + "ppmi9", + ], + "filters": None, + }, + "parameters": None, + "test_case_num": 99, + } + input["type"] = "flower" + algorithm_result = get_algorithm_result("logistic_regression", input) + assert algorithm_result in ( + {"accuracy": 0.3813682678311499}, + {"accuracy": 0.61863173216885}, + ) diff --git a/tests/algorithm_validation_tests/flower/test_mnist_logistic_regression.py b/tests/algorithm_validation_tests/flower/test_mnist_logistic_regression.py new file mode 100644 index 000000000..319a78b97 --- /dev/null +++ b/tests/algorithm_validation_tests/flower/test_mnist_logistic_regression.py @@ -0,0 +1,27 @@ +def
test_mnist_logistic_regression(get_algorithm_result): + input = { + "inputdata": { + "y": ["gender"], + "x": ["lefthippocampus"], + "data_model": "dementia:0.1", + "datasets": [ + "ppmi0", + "ppmi1", + "ppmi2", + "ppmi3", + "ppmi5", + "ppmi6", + "edsd6", + "ppmi7", + "ppmi8", + "ppmi9", + ], + "filters": None, + }, + "parameters": None, + "test_case_num": 99, + } + input["type"] = "flower" + algorithm_result = get_algorithm_result("mnist_logistic_regression", input) + assert "accuracy" in algorithm_result + assert {"accuracy": 0.8486} == algorithm_result diff --git a/tests/algorithm_validation_tests/one_node_deployment_template.toml b/tests/algorithm_validation_tests/one_node_deployment_template.toml index 01b01c871..afd5f782e 100644 --- a/tests/algorithm_validation_tests/one_node_deployment_template.toml +++ b/tests/algorithm_validation_tests/one_node_deployment_template.toml @@ -10,6 +10,7 @@ monetdb_memory_limit = 4096 # MB algorithm_folders = "./exareme2/algorithms/exareme2,./exareme2/algorithms/flower,./tests/algorithms" worker_landscape_aggregator_update_interval = 30 +flower_execution_timeout = 30 celery_tasks_timeout = 60 celery_cleanup_task_timeout=2 celery_run_udf_task_timeout = 120 diff --git a/tests/prod_env_tests/deployment_configs/kind_configuration/kind_cluster.yaml b/tests/prod_env_tests/deployment_configs/kind_configuration/kind_cluster.yaml index 21c9bea9b..df0c893cf 100644 --- a/tests/prod_env_tests/deployment_configs/kind_configuration/kind_cluster.yaml +++ b/tests/prod_env_tests/deployment_configs/kind_configuration/kind_cluster.yaml @@ -28,12 +28,3 @@ nodes: containerPath: /opt/exareme2/csvs - hostPath: ./tests/prod_env_tests/deployment_configs/kind_configuration/worker2/monetdb_password.sh containerPath: /opt/exareme2/credentials/monetdb_password.sh - -- role: worker - extraMounts: - - hostPath: ./tests/prod_env_tests/deployment_configs/kind_configuration/worker3/hostname - containerPath: /etc/hostname - - hostPath: ./tests/test_data - containerPath: /opt/exareme2/csvs - - hostPath: ./tests/prod_env_tests/deployment_configs/kind_configuration/worker3/monetdb_password.sh - containerPath: /opt/exareme2/credentials/monetdb_password.sh \ No newline at end of file diff --git a/tests/prod_env_tests/deployment_configs/kind_configuration/worker3/hostname b/tests/prod_env_tests/deployment_configs/kind_configuration/worker3/hostname deleted file mode 100644 index 834577e31..000000000 --- a/tests/prod_env_tests/deployment_configs/kind_configuration/worker3/hostname +++ /dev/null @@ -1 +0,0 @@ -localworker3 diff --git a/tests/prod_env_tests/deployment_configs/kind_configuration/worker3/monetdb_password.sh b/tests/prod_env_tests/deployment_configs/kind_configuration/worker3/monetdb_password.sh deleted file mode 100644 index bdfdc2c47..000000000 --- a/tests/prod_env_tests/deployment_configs/kind_configuration/worker3/monetdb_password.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -export MONETDB_LOCAL_PASSWORD="worker3" \ No newline at end of file diff --git a/tests/prod_env_tests/deployment_configs/kubernetes_values.yaml b/tests/prod_env_tests/deployment_configs/kubernetes_values.yaml index 2a7ed3a77..78567a44b 100644 --- a/tests/prod_env_tests/deployment_configs/kubernetes_values.yaml +++ b/tests/prod_env_tests/deployment_configs/kubernetes_values.yaml @@ -1,4 +1,4 @@ -localnodes: 3 +localnodes: 2 exareme2_images: repository: madgik @@ -19,6 +19,7 @@ db: controller: worker_landscape_aggregator_update_interval: 20 + flower_execution_timeout: 20 celery_tasks_timeout: 120 
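# flower_execution_timeout above is presumably expressed in seconds, consistent with the other controller timeout settings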
workers_cleanup_interval: 60 cleanup_file_folder: /opt/cleanup diff --git a/tests/smpc_env_tests/deployment_configs/kubernetes_values.yaml b/tests/smpc_env_tests/deployment_configs/kubernetes_values.yaml index c40a42ed7..6334f6a14 100644 --- a/tests/smpc_env_tests/deployment_configs/kubernetes_values.yaml +++ b/tests/smpc_env_tests/deployment_configs/kubernetes_values.yaml @@ -15,6 +15,7 @@ db: controller: worker_landscape_aggregator_update_interval: 30 + flower_execution_timeout: 30 celery_tasks_timeout: 20 celery_run_udf_task_timeout: 120 workers_cleanup_interval: 60 diff --git a/tests/standalone_tests/algorithms/flower/test_process_manager.py b/tests/standalone_tests/algorithms/flower/test_process_manager.py new file mode 100644 index 000000000..34bd4f9ea --- /dev/null +++ b/tests/standalone_tests/algorithms/flower/test_process_manager.py @@ -0,0 +1,120 @@ +import os +import signal +import unittest +from unittest import mock +from unittest.mock import MagicMock +from unittest.mock import patch + +import psutil + +from exareme2.algorithms.flower.process_manager import ALGORITHMS_ROOT +from exareme2.algorithms.flower.process_manager import FlowerProcess +from exareme2.algorithms.flower.process_manager import handle_zombie +from exareme2.algorithms.flower.process_manager import terminate_process + + +class TestFlowerProcess(unittest.TestCase): + @patch("exareme2.algorithms.flower.process_manager.subprocess.Popen") + def test_start_process(self, mock_popen): + """Test starting a process successfully.""" + process = FlowerProcess("script.py") + logger = MagicMock() + mock_popen.return_value.pid = 12345 + + expected_script_path = os.path.join(ALGORITHMS_ROOT, "script.py") + + # Starting the process + pid = process.start(logger) + + # Construct the expected command + expected_command = ( + f"Executing command: ['poetry', 'run', 'python', '{expected_script_path}']" + ) + + # Assert the expected command is logged + logger.info.assert_called_with(expected_command) + self.assertEqual(pid, 12345) + + @patch("exareme2.algorithms.flower.process_manager.process_status") + @patch("exareme2.algorithms.flower.process_manager.psutil.Process") + def test_terminate_process(self, mock_psutil_process, mock_process_status): + """Test terminating a process.""" + mock_process = MagicMock() + mock_psutil_process.return_value = mock_process + logger = MagicMock() + + # Setup the process status to simulate process still running initially then stopping + mock_process_status.side_effect = [ + "running", # Status before SIGTERM + "not running", # Status after SIGTERM + ] + + # Call the function to test + terminate_process(mock_process, logger) + + # Check that terminate and wait were called + mock_process.terminate.assert_called() + mock_process.wait.assert_called() + + @patch("exareme2.algorithms.flower.process_manager.os.waitpid") + @patch("exareme2.algorithms.flower.process_manager.psutil.Process") + def test_handle_zombie(self, mock_psutil_process, mock_waitpid): + """Test handling of a zombie process.""" + # Setup the mocked process + mock_process = MagicMock() + mock_process.pid = 1234 # Setting the PID for the process + mock_psutil_process.return_value = mock_process + mock_process.status.return_value = psutil.STATUS_ZOMBIE + + # Set up the logger + logger = MagicMock() + + # Simulate os.waitpid indicating the process has been reaped successfully + mock_waitpid.return_value = (1234, 0) # (pid, status) + + # Execute the function + handle_zombie(mock_process, logger) + + # Assert that waitpid was called + 
mock_waitpid.assert_called_with(1234, 0) + logger.info.assert_called_with("Zombie process 1234 reaped successfully.") + + @patch("exareme2.algorithms.flower.process_manager.terminate_process") + @patch("exareme2.algorithms.flower.process_manager.psutil.Process") + def test_kill_process_class_method( + self, mock_psutil_process, mock_terminate_process + ): + """Test the class method for killing a process based on the algorithm name.""" + mock_process = MagicMock() + mock_psutil_process.return_value = mock_process + mock_process.cmdline.return_value = ["python", "script.py"] + logger = MagicMock() + FlowerProcess.kill_process(1234, "script", logger) + mock_terminate_process.assert_called_once_with(mock_process, logger) + + @patch("exareme2.algorithms.flower.process_manager.terminate_process") + @patch("exareme2.algorithms.flower.process_manager.psutil.Process") + def test_kill_process_access_denied( + self, mock_psutil_process, mock_terminate_process + ): + """Test handling an AccessDenied error while killing a process based on the algorithm name.""" + # Create a mock process object with appropriate attributes + mock_process = MagicMock() + mock_psutil_process.return_value = mock_process + mock_process.cmdline.return_value = ["python", "some_algorithm_script.py"] + + # Set up terminate_process to raise an AccessDenied exception + mock_terminate_process.side_effect = psutil.AccessDenied(pid=1234) + + logger = MagicMock() + + # Assume PID 1234 and the algorithm name 'some_algorithm' are sufficient to identify the process + FlowerProcess.kill_process(1234, "some_algorithm", logger) + + # Assert that terminate_process was called, thus confirming the algorithm name check passed + mock_terminate_process.assert_called_once_with(mock_process, logger) + + # Check that the error was handled and logged correctly + logger.error.assert_called_with( + "Access denied when attempting to terminate PID 1234."
+ ) diff --git a/tests/standalone_tests/controller/services/exareme2/test_cleanup_after_algorithm_execution.py b/tests/standalone_tests/controller/services/exareme2/test_cleanup_after_algorithm_execution.py index 11c6d8f33..f29165a7f 100644 --- a/tests/standalone_tests/controller/services/exareme2/test_cleanup_after_algorithm_execution.py +++ b/tests/standalone_tests/controller/services/exareme2/test_cleanup_after_algorithm_execution.py @@ -59,6 +59,7 @@ def controller_config(): "framework_log_level": "INFO", "deployment_type": "LOCAL", "worker_landscape_aggregator_update_interval": 30, + "flower_execution_timeout": 30, "cleanup": { "contextids_cleanup_folder": "/tmp/test_cleanup_entries", "workers_cleanup_interval": 2, diff --git a/tests/standalone_tests/algorithms/exareme2/test_single_local_worker_algorithm_execution.py b/tests/standalone_tests/controller/services/exareme2/test_single_local_worker_algorithm_execution.py similarity index 99% rename from tests/standalone_tests/algorithms/exareme2/test_single_local_worker_algorithm_execution.py rename to tests/standalone_tests/controller/services/exareme2/test_single_local_worker_algorithm_execution.py index ef8f80c0c..b6c1adbbc 100644 --- a/tests/standalone_tests/algorithms/exareme2/test_single_local_worker_algorithm_execution.py +++ b/tests/standalone_tests/controller/services/exareme2/test_single_local_worker_algorithm_execution.py @@ -56,6 +56,7 @@ def controller_config(): "framework_log_level": "INFO", "deployment_type": "LOCAL", "worker_landscape_aggregator_update_interval": 30, + "flower_execution_timeout": 30, "localworkers": { "config_file": path.join( TEST_ENV_CONFIG_FOLDER, CONTROLLER_LOCALWORKER1_ADDRESSES_FILE diff --git a/tests/standalone_tests/controller/services/flower/test_flower_execution_info.py b/tests/standalone_tests/controller/services/flower/test_flower_execution_info.py new file mode 100644 index 000000000..7236808cd --- /dev/null +++ b/tests/standalone_tests/controller/services/flower/test_flower_execution_info.py @@ -0,0 +1,84 @@ +import asyncio +import unittest +from unittest.mock import Mock + +from exareme2.controller.services.api.algorithm_request_dtos import ( + AlgorithmInputDataDTO, +) +from exareme2.controller.services.flower import FlowerIORegistry +from exareme2.controller.services.flower.flower_io_registry import Status + + +class TestFlowerExecutionInfo(unittest.TestCase): + def setUp(self): + self.loop = asyncio.new_event_loop() # Create a new event loop + asyncio.set_event_loop( + self.loop + ) # Set the newly created event loop as the current event loop + + self.logger = Mock() + self.info = FlowerIORegistry(20, self.logger) + + def tearDown(self): + self.loop.close() # Close the loop at the end of the test + + def test_reset_sync_initial_state(self): + self.info._reset_sync() + self.assertEqual(self.info.get_status(), Status.RUNNING) + self.assertFalse(self.info.result_ready.is_set()) + + def test_set_result_success(self): + result = {"data": "some value"} + asyncio.run(self.info.set_result(result)) + self.assertEqual(self.info.get_status(), Status.SUCCESS) + self.assertTrue(self.info.result_ready.is_set()) + + def test_set_result_failure(self): + result = {"error": "some error"} + asyncio.run(self.info.set_result(result)) + self.assertEqual(self.info.get_status(), Status.FAILURE) + + def test_get_result(self): + result = {"data": "expected result"} + asyncio.run(self.info.set_result(result)) + retrieved_result = asyncio.run(self.info.get_result()) + self.assertEqual(retrieved_result, result) + + 
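# set_inputdata is expected to re-arm the registry for a new request (status back to RUNNING); the next test verifies this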
def test_set_inputdata(self): + new_data = AlgorithmInputDataDTO( + data_model="new model", datasets=["new dataset"] + ) + self.info.set_inputdata(new_data) + self.assertEqual(self.info.get_status(), Status.RUNNING) + self.assertEqual(self.info.get_inputdata(), new_data) + + +class TestFlowerExecutionInfoAsync(unittest.IsolatedAsyncioTestCase): + async def asyncSetUp(self): + self.logger = Mock() + self.info = FlowerIORegistry(20, self.logger) + + async def test_event_set_on_result(self): + """Test that the event is set when the result is set.""" + result = {"data": "some value"} + await self.info.set_result(result) + self.assertTrue( + self.info.result_ready.is_set(), "Event should be set after result is set" + ) + + async def test_get_result_waits_for_event(self): + """Test that get_result waits for the event to be set.""" + result = {"data": "expected result"} + # Set the result first, which also sets the result_ready event + await self.info.set_result(result) + + # get_result waits on result_ready internally, so it returns the stored result + retrieved_result = await self.info.get_result() + self.assertEqual(retrieved_result, result) + + async def test_event_reset_on_reset(self): + """Test that the event is reset when the info is reset.""" + await self.info.reset() + self.assertFalse( + self.info.result_ready.is_set(), "Event should be reset after calling reset" + ) diff --git a/tests/standalone_tests/controller/services/flower/test_pos_and_kw_args_in_algorithm_flow.py b/tests/standalone_tests/controller/services/flower/test_pos_and_kw_args_in_algorithm_flow.py new file mode 100644 index 000000000..8abb1e009 --- /dev/null +++ b/tests/standalone_tests/controller/services/flower/test_pos_and_kw_args_in_algorithm_flow.py @@ -0,0 +1,41 @@ +import json + +import pytest +import requests + +from tests.standalone_tests.conftest import ALGORITHMS_URL + + +@pytest.mark.slow +def test_pos_and_kw_args_in_algorithm_flow( + localworker1_worker_service, + load_data_localworker1, + controller_service_with_localworker1, +): + algorithm_name = "logistic_regression" + request_dict = { + "inputdata": { + "y": ["gender"], + "x": ["lefthippocampus"], + "data_model": "dementia:0.1", + "datasets": [ + "ppmi0", + "ppmi1", + "ppmi2", + "ppmi3", + ], + "filters": None, + }, + "type": "flower", + } + + algorithm_url = ALGORITHMS_URL + "/" + algorithm_name + + headers = {"Content-type": "application/json", "Accept": "text/plain"} + response = requests.post( + algorithm_url, + data=json.dumps(request_dict), + headers=headers, + ) + assert response.status_code == 200 diff --git a/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator.py b/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator.py index 41145c971..8d4039e2b 100644 --- a/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator.py +++ b/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator.py @@ -43,6 +43,7 @@ def controller_config(): controller_config = { "deployment_type": "LOCAL", "worker_landscape_aggregator_update_interval": 30, + "flower_execution_timeout": 30, "rabbitmq": { "celery_tasks_timeout": 5, "celery_run_udf_task_timeout": 10, diff --git a/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator_update_loop.py
b/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator_update_loop.py index 696e11eef..3998a2722 100644 --- a/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator_update_loop.py +++ b/tests/standalone_tests/controller/services/worker_landscape_agreggator/test_worker_landscape_aggregator_update_loop.py @@ -26,6 +26,7 @@ def controller_config(): controller_config = { "deployment_type": "LOCAL", "worker_landscape_aggregator_update_interval": 30, + "flower_execution_timeout": 30, "localworkers": { "config_file": "./tests/standalone_tests/testing_env_configs/test_globalworker_localworker1_localworker2_localworkertmp_addresses.json" }, diff --git a/tests/standalone_tests/testing_env_configs/test_controller.toml b/tests/standalone_tests/testing_env_configs/test_controller.toml index f7d56b53e..1940ea0a3 100644 --- a/tests/standalone_tests/testing_env_configs/test_controller.toml +++ b/tests/standalone_tests/testing_env_configs/test_controller.toml @@ -2,6 +2,7 @@ log_level = "DEBUG" framework_log_level = "INFO" deployment_type = "LOCAL" worker_landscape_aggregator_update_interval = 30 +flower_execution_timeout = 30 [cleanup] contextids_cleanup_folder = "/tmp" diff --git a/tests/standalone_tests/testing_env_configs/test_external_smpc_controller.toml b/tests/standalone_tests/testing_env_configs/test_external_smpc_controller.toml index ef3f5fc75..c8e2ceefe 100644 --- a/tests/standalone_tests/testing_env_configs/test_external_smpc_controller.toml +++ b/tests/standalone_tests/testing_env_configs/test_external_smpc_controller.toml @@ -2,6 +2,7 @@ log_level = "DEBUG" framework_log_level = "INFO" deployment_type = "LOCAL" worker_landscape_aggregator_update_interval = 30 +flower_execution_timeout = 30 [cleanup] contextids_cleanup_folder = "/tmp" diff --git a/tests/standalone_tests/testing_env_configs/test_external_smpc_dp_controller.toml b/tests/standalone_tests/testing_env_configs/test_external_smpc_dp_controller.toml index 49ec44367..344b75e3f 100644 --- a/tests/standalone_tests/testing_env_configs/test_external_smpc_dp_controller.toml +++ b/tests/standalone_tests/testing_env_configs/test_external_smpc_dp_controller.toml @@ -2,6 +2,7 @@ log_level = "DEBUG" framework_log_level = "INFO" deployment_type = "LOCAL" worker_landscape_aggregator_update_interval = 30 +flower_execution_timeout = 30 [cleanup] contextids_cleanup_folder = "/tmp" diff --git a/tests/standalone_tests/testing_env_configs/test_smpc_controller.toml b/tests/standalone_tests/testing_env_configs/test_smpc_controller.toml index 6b3c710f7..4aafc16bd 100644 --- a/tests/standalone_tests/testing_env_configs/test_smpc_controller.toml +++ b/tests/standalone_tests/testing_env_configs/test_smpc_controller.toml @@ -2,6 +2,7 @@ log_level = "DEBUG" framework_log_level = "INFO" deployment_type = "LOCAL" worker_landscape_aggregator_update_interval = 30 +flower_execution_timeout = 30 [cleanup] contextids_cleanup_folder = "/tmp" diff --git a/tests/standalone_tests/controller/test_get_data_model_attributes.py b/tests/standalone_tests/worker/worker_info/test_get_data_model_attributes.py similarity index 100% rename from tests/standalone_tests/controller/test_get_data_model_attributes.py rename to tests/standalone_tests/worker/worker_info/test_get_data_model_attributes.py diff --git a/tests/standalone_tests/controller/test_get_data_model_cdes.py b/tests/standalone_tests/worker/worker_info/test_get_data_model_cdes.py similarity index 100% rename from 
tests/standalone_tests/controller/test_get_data_model_cdes.py rename to tests/standalone_tests/worker/worker_info/test_get_data_model_cdes.py diff --git a/tests/standalone_tests/controller/test_get_datasets_per_data_model.py b/tests/standalone_tests/worker/worker_info/test_get_datasets_per_data_model.py similarity index 100% rename from tests/standalone_tests/controller/test_get_datasets_per_data_model.py rename to tests/standalone_tests/worker/worker_info/test_get_datasets_per_data_model.py diff --git a/tests/standalone_tests/controller/test_get_worker_info.py b/tests/standalone_tests/worker/worker_info/test_get_worker_info.py similarity index 100% rename from tests/standalone_tests/controller/test_get_worker_info.py rename to tests/standalone_tests/worker/worker_info/test_get_worker_info.py
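For reference, a minimal sketch (not part of the patch) of how the new Flower test helpers and the conftest fixture are meant to compose in a parametrised validation test. The expected-file path, the my_algorithm name, and the accuracy key are hypothetical, and each test case's input is assumed to carry the test_case_num key the fixture uses for caching:

from pathlib import Path

import pytest

from tests.algorithm_validation_tests.flower.helpers import assert_allclose
from tests.algorithm_validation_tests.flower.helpers import get_test_params

# Hypothetical expected file shaped like {"test_cases": [{"input": {...}, "output": {...}}, ...]}
expected_file = Path(__file__).parent / "expected" / "my_algorithm_expected.json"


@pytest.mark.parametrize("test_input,expected", get_test_params(expected_file))
def test_my_algorithm(test_input, expected, get_algorithm_result):
    # The fixture posts to /algorithms/my_algorithm and caches per (algname, test_case_num)
    result = get_algorithm_result("my_algorithm", test_input)
    assert_allclose(result["accuracy"], expected["accuracy"])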