diff --git a/.github/workflows/build-and-deploy.yml b/.github/workflows/build-and-deploy.yml index 122ba6112..a8cb0a85a 100644 --- a/.github/workflows/build-and-deploy.yml +++ b/.github/workflows/build-and-deploy.yml @@ -5,6 +5,7 @@ on: - staging - release pull_request: + pull_request_target: repository_dispatch: types: build-and-deploy workflow_dispatch: @@ -25,26 +26,27 @@ jobs: build-and-deploy-datasets: runs-on: ubuntu-20.04 - environment: - name: ${{ github.ref }} - env: VERBOSE: 0 PYTHONUNBUFFERED: 1 - DATA_AWS_S3_BUCKET: ${{ secrets.DATA_AWS_S3_BUCKET }} - DATA_AWS_CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.DATA_AWS_CLOUDFRONT_DISTRIBUTION_ID }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: us-east-2 - GH_TOKEN: ${{ github.token }} steps: - name: "Checkout code" +# if: github.event.pull_request.head.repo.full_name == github.repository uses: actions/checkout@v3 with: fetch-depth: 0 submodules: true +# - name: "Checkout code (fork)" +# if: github.event.pull_request.head.repo.full_name != github.repository +# uses: actions/checkout@v3 +# with: +# fetch-depth: 0 +# submodules: true +# ref: ${{github.event.pull_request.head.ref}} +# repository: ${{github.event.pull_request.head.repo.full_name}} + - name: "Install system dependencies" run: | sudo apt-get install brotli pigz parallel python3 rename --yes -qq >/dev/null @@ -53,16 +55,15 @@ jobs: run: | pip3 install -r requirements.txt - - name: "Install awscli" + - name: "Rebuild, commit and push datasets (from a fork)" + if: github.event.pull_request.head.repo.full_name != github.repository run: | - pushd /tmp >/dev/null - curl -fsSL "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" - unzip -oqq awscliv2.zip - sudo ./aws/install --update - popd >/dev/null - aws --version + git config --global user.email "nextstrain-bot-from-fork@nextstrain.org" + git config --global user.name 
"nextstrain-bot-from-fork" - - name: "Rebuild, commit and push datasets" + ./scripts/rebuild --input-dir 'data/' --output-dir 'data_output/' --no-pull --push --repo="${GITHUB_REPOSITORY}" + + - name: "Rebuild, commit and push (non-release branch)" if: github.ref != 'refs/heads/release' run: | git config --global user.email "${{ secrets.BOT_GIT_USER_EMAIL }}" @@ -70,8 +71,10 @@ jobs: ./scripts/rebuild --input-dir 'data/' --output-dir 'data_output/' --push --repo="${GITHUB_REPOSITORY}" - - name: "Rebuild, commit, push and make a release" + - name: "Rebuild, commit, push and make a release (release branch)" if: github.ref == 'refs/heads/release' + env: + GH_TOKEN: ${{ github.token }} run: | git config --global user.email "${{ secrets.BOT_GIT_USER_EMAIL }}" git config --global user.name "${{ secrets.BOT_GIT_USER_NAME }}" @@ -79,18 +82,19 @@ jobs: ./scripts/rebuild --input-dir 'data/' --output-dir 'data_output/' --release --repo="${GITHUB_REPOSITORY}" - name: "Deploy dataset server" - if: ${{ endsWith(github.ref, '/master') || endsWith(github.ref, '/staging') || endsWith(github.ref, '/release') }} + if: ${{ (github.event.pull_request.head.repo.full_name == github.repository) && (endsWith(github.ref, '/master') || endsWith(github.ref, '/staging') || endsWith(github.ref, '/release')) }} + env: + DATA_AWS_S3_BUCKET: ${{ secrets.DATA_AWS_S3_BUCKET }} + DATA_AWS_CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.DATA_AWS_CLOUDFRONT_DISTRIBUTION_ID }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: us-east-2 run: | - ./scripts/upload 'data_output/' - - - name: "Upload build artifacts: dataset server" - uses: actions/upload-artifact@v3 - with: - name: server - path: ./data_output/* + pushd /tmp >/dev/null + curl -fsSL "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" + unzip -oqq awscliv2.zip + sudo ./aws/install --update + popd >/dev/null + aws --version - 
- name: "Upload build artifacts: zip archives" - uses: actions/upload-artifact@v3 - with: - name: zips - path: ./data_temp/* + ./scripts/upload 'data_output/'